(for PR #27494)
2025-02-24 08:32.09: New job: test owl-opt.0.0.1 with ocamlmod.0.1.0, using opam dev from https://github.com/ocaml/opam-repository.git#refs/pull/27494/head (0def47e7b511023db693d6098b7284568d1f56f0) on debian-12-ocaml-4.14/amd64

To reproduce locally:

cd $(mktemp -d)
git clone --recursive "https://github.com/ocaml/opam-repository.git" && cd "opam-repository" && git fetch origin "refs/pull/27494/head" && git reset --hard 0def47e7
git fetch origin master
git merge --no-edit 4022a684b64be8161a05cf897f492f8680792469
cat > ../Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.14@sha256:74114e6518f67eaaca9dfbd3a0a8e27123f8607d1ac62b00e945d9c187c96098
USER 1000:1000
WORKDIR /home/opam
RUN sudo ln -f /usr/bin/opam-dev /usr/bin/opam
RUN opam init --reinit -ni
RUN opam option solver=builtin-0install && opam config report
ENV OPAMDOWNLOADJOBS="1"
ENV OPAMERRLOGLEN="0"
ENV OPAMPRECISETRACKING="1"
ENV CI="true"
ENV OPAM_REPO_CI="true"
RUN rm -rf opam-repository/
COPY --chown=1000:1000 . opam-repository/
RUN opam repository set-url --strict default opam-repository/
RUN opam update --depexts || true
RUN opam pin add -k version -yn ocamlmod.0.1.0 0.1.0
RUN opam reinstall ocamlmod.0.1.0; \
    res=$?; \
    test "$res" != 31 && exit "$res"; \
    export OPAMCLI=2.0; \
    build_dir=$(opam var prefix)/.opam-switch/build; \
    failed=$(ls "$build_dir"); \
    partial_fails=""; \
    for pkg in $failed; do \
      if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-12\""; then \
        echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
      fi; \
      test "$pkg" != 'ocamlmod.0.1.0' && partial_fails="$partial_fails $pkg"; \
    done; \
    test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
    exit 1
RUN opam reinstall owl-opt.0.0.1; \
    res=$?; \
    test "$res" != 31 && exit "$res"; \
    export OPAMCLI=2.0; \
    build_dir=$(opam var prefix)/.opam-switch/build; \
    failed=$(ls "$build_dir"); \
    partial_fails=""; \
    for pkg in $failed; do \
      if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-12\""; then \
        echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
      fi; \
      test "$pkg" != 'owl-opt.0.0.1' && partial_fails="$partial_fails $pkg"; \
    done; \
    test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
    exit 1
RUN (opam reinstall --with-test owl-opt.0.0.1) || true
RUN opam reinstall --with-test --verbose owl-opt.0.0.1; \
    res=$?; \
    test "$res" != 31 && exit "$res"; \
    export OPAMCLI=2.0; \
    build_dir=$(opam var prefix)/.opam-switch/build; \
    failed=$(ls "$build_dir"); \
    partial_fails=""; \
    for pkg in $failed; do \
      if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-12\""; then \
        echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
      fi; \
      test "$pkg" != 'owl-opt.0.0.1' && partial_fails="$partial_fails $pkg"; \
    done; \
    test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
    exit 1
END-OF-DOCKERFILE
docker build -f ../Dockerfile .
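The Docker image above only pins the build environment; the check itself is plain opam. As a rough sketch, the same sequence can be run directly in an existing switch, assuming OCaml 4.14 on Debian 12 and the PR branch checked out in ./opam-repository (both assumptions, not part of the job definition):

    # Sketch only: same checks as the Dockerfile, without Docker.
    # Assumes an OCaml 4.14 switch on Debian 12 and the PR checkout in ./opam-repository.
    opam repository set-url --strict default ./opam-repository
    opam update --depexts || true
    opam pin add -k version -yn ocamlmod.0.1.0 0.1.0
    opam reinstall ocamlmod.0.1.0                # does the newly added package build?
    opam reinstall owl-opt.0.0.1                 # does the reverse dependency still build?
    opam reinstall --with-test owl-opt.0.0.1     # do the reverse dependency's tests still pass?

Each RUN opam reinstall step in the Dockerfile is followed by a handler that only fires when opam exits with status 31 (the code opam uses for a package build/install failure): it lists whatever is left in $(opam var prefix)/.opam-switch/build, prints a notice for any failing package whose opam file accepts failures on debian-12 via x-ci-accept-failures, reports failing dependencies other than the package under test, and then exits 1.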
2025-02-24 08:32.09: Using cache hint "ocaml/opam:debian-12-ocaml-4.14@sha256:74114e6518f67eaaca9dfbd3a0a8e27123f8607d1ac62b00e945d9c187c96098-ocamlmod.0.1.0-owl-opt.0.0.1-0def47e7b511023db693d6098b7284568d1f56f0"

2025-02-24 08:32.09: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.14@sha256:74114e6518f67eaaca9dfbd3a0a8e27123f8607d1ac62b00e945d9c187c96098)
 (user (uid 1000) (gid 1000))
 (workdir /home/opam)
 (run (shell "sudo ln -f /usr/bin/opam-dev /usr/bin/opam"))
 (run (network host) (shell "opam init --reinit --config .opamrc-sandbox -ni"))
 (run (shell "opam option solver=builtin-0install && opam config report"))
 (env OPAMDOWNLOADJOBS 1)
 (env OPAMERRLOGLEN 0)
 (env OPAMPRECISETRACKING 1)
 (env CI true)
 (env OPAM_REPO_CI true)
 (run (shell "rm -rf opam-repository/"))
 (copy (src .) (dst opam-repository/))
 (run (shell "opam repository set-url --strict default opam-repository/"))
 (run (network host) (shell "opam update --depexts || true"))
 (run (shell "opam pin add -k version -yn ocamlmod.0.1.0 0.1.0"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam reinstall ocamlmod.0.1.0;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'ocamlmod.0.1.0' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam reinstall owl-opt.0.0.1;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1"))
 (run (network host) (shell "(opam reinstall --with-test owl-opt.0.0.1) || true"))
 (run (shell "opam reinstall --with-test --verbose owl-opt.0.0.1;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1"))
)

2025-02-24 08:32.09: Waiting for resource in pool OCluster
2025-02-24 09:11.16: Waiting for worker…
2025-02-24 09:14.01: Got resource from pool OCluster
Building on doris.caelum.ci.dev
All commits already cached
Updating files: 100% (22467/22467), done.
HEAD is now at 4022a684b6 Merge pull request #27464 from hannesm/release-crunch-v4.0.0
Updating 4022a684b6..0def47e7b5
Fast-forward
 packages/ocamlmod/ocamlmod.0.1.0/opam | 37 +++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 packages/ocamlmod/ocamlmod.0.1.0/opam

(from ocaml/opam:debian-12-ocaml-4.14@sha256:74114e6518f67eaaca9dfbd3a0a8e27123f8607d1ac62b00e945d9c187c96098)
2025-02-24 09:15.50 ---> using "fd8c1dcac8c3a6cd3ee8edd679de5f3384de62d6f587d449e00238111470ab75" from cache

/: (user (uid 1000) (gid 1000))
/: (workdir /home/opam)
/home/opam: (run (shell "sudo ln -f /usr/bin/opam-dev /usr/bin/opam"))
2025-02-24 09:15.50 ---> using "63348f49024352af4df51181a4d2b78015b336e78bdb909884d7d69472d901a3" from cache

/home/opam: (run (network host) (shell "opam init --reinit --config .opamrc-sandbox -ni"))
Configuring from /home/opam/.opamrc-sandbox, then /home/opam/.opamrc, and finally from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.

Continue? [Y/n] y
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
            opam option jobs=39 --global
Format upgrade done.
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
2025-02-24 09:15.50 ---> using "43a9673a4da24840797129b74017b6eb598863ba09d5d1a9bc3d3bd190bc8651" from cache

/home/opam: (run (shell "opam option solver=builtin-0install && opam config report"))
Set to 'builtin-0install' the field solver in global configuration
# opam config report
# opam-version         2.4.0~alpha1~dev (34b7b4ec4af0ebd3c8fc4aee8088471be2ad48c7)
# self-upgrade         no
# system               arch=x86_64 os=linux os-distribution=debian os-version=12
# solver               builtin-0install
# install-criteria     -changed,-count[avoid-version,solution]
# upgrade-criteria     -count[avoid-version,solution]
# jobs                 255
# repositories         1 (local), 1 (version-controlled)
# pinned               1 (version)
# current-switch       4.14
# invariant            ["ocaml-base-compiler" {= "4.14.2"}]
# compiler-packages    ocaml-base-compiler.4.14.2, ocaml-options-vanilla.1
# ocaml:native         true
# ocaml:native-tools   true
# ocaml:native-dynlink true
# ocaml:stubsdir       /home/opam/.opam/4.14/lib/ocaml/stublibs:/home/opam/.opam/4.14/lib/ocaml
# ocaml:preinstalled   false
# ocaml:compiler       4.14.2
2025-02-24 09:15.50 ---> using "5b9fc6f723d0065251ab5d52aa7740a229f8f438bf79f559d9f8db36ee599317" from cache

/home/opam: (env OPAMDOWNLOADJOBS 1)
/home/opam: (env OPAMERRLOGLEN 0)
/home/opam: (env OPAMPRECISETRACKING 1)
/home/opam: (env CI true)
/home/opam: (env OPAM_REPO_CI true)
/home/opam: (run (shell "rm -rf opam-repository/"))
2025-02-24 09:15.50 ---> using "a874c49896169ec02e95177c8b5fb923a0e02dbc7bef07316a52b7ca53832295" from cache

/home/opam: (copy (src .) (dst opam-repository/))
2025-02-24 09:15.52 ---> using "4253df3ceb367f320e2e59481694f41945d0aa5a3aff8e84d62ec30597769699" from cache

/home/opam: (run (shell "opam repository set-url --strict default opam-repository/"))
[default] Initialised
2025-02-24 09:15.52 ---> using "b0d31e1d3e54c8f2cf9229a7a258f86593277d01a3029f618e9e06729f50143c" from cache

/home/opam: (run (network host) (shell "opam update --depexts || true"))
+ /usr/bin/sudo "apt-get" "update"
- Hit:1 http://deb.debian.org/debian bookworm InRelease
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [246 kB]
- Fetched 349 kB in 0s (766 kB/s)
- Reading package lists...
2025-02-24 09:15.52 ---> using "3190819e9a9b503daf97a971adb376054d5dae792297b7f693d4b54899654a95" from cache /home/opam: (run (shell "opam pin add -k version -yn ocamlmod.0.1.0 0.1.0")) ocamlmod is now pinned to version 0.1.0 2025-02-24 09:15.52 ---> using "a1c9a3b28bfa48ebb2cb16d4ad6c5c79f4f4025f4430dedc449bfd06148b496a" from cache /home/opam: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam reinstall ocamlmod.0.1.0;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'ocamlmod.0.1.0' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1")) ocamlmod.0.1.0 is not installed. Install it? [Y/n] y The following actions will be performed: === install 2 packages - install dune 3.17.2 [required by ocamlmod] - install ocamlmod 0.1.0 (pinned) <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved dune.3.17.2 (cached) -> retrieved ocamlmod.0.1.0 (cached) -> installed dune.3.17.2 -> installed ocamlmod.0.1.0 Done. # To update the current shell environment, run: eval $(opam env) 2025-02-24 09:15.52 ---> using "518b5c4f8da5b692ed3584669396faac7fdc0a48af64716bd9f64828cc9889f6" from cache /home/opam: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam reinstall owl-opt.0.0.1;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1")) owl-opt.0.0.1 is not installed. Install it? 
[Y/n] y
The following actions will be performed:
=== install 22 packages
  - install base                v0.16.3 [required by owl, ppx-owl-opt]
  - install bigarray-compat     1.1.0   [required by ctypes]
  - install camlzip             1.13    [required by npy]
  - install conf-bash           1       [required by base]
  - install conf-openblas       0.2.2   [required by owl]
  - install conf-pkg-config     4       [required by conf-zlib]
  - install conf-zlib           1       [required by camlzip]
  - install csexp               1.5.2   [required by dune-configurator]
  - install ctypes              0.23.0  [required by owl]
  - install dune-configurator   3.17.2  [required by owl]
  - install integers            0.7.0   [required by ctypes]
  - install npy                 0.0.9   [required by owl]
  - install ocaml-compiler-libs v0.12.4 [required by ppxlib]
  - install ocamlfind           1.9.8   [required by camlzip]
  - install owl                 1.2     [required by owl-opt]
  - install owl-base            1.2     [required by owl]
  - install owl-opt             0.0.1
  - install ppx-owl-opt         0.0.1   [required by owl-opt]
  - install ppx_derivers        1.2.1   [required by ppxlib]
  - install ppxlib              0.35.0  [required by ppx-owl-opt]
  - install sexplib0            v0.16.0 [required by base, ppxlib]
  - install stdlib-shims        0.3.0   [required by ppxlib]

The following system packages will first need to be installed:
    liblapacke-dev libopenblas-dev pkg-config zlib1g-dev

<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>

opam believes some required external dependencies are missing. opam can:
> 1. Run apt-get to install them (may need root/sudo access)
  2. Display the recommended apt-get command and wait while you run it manually (e.g. in another terminal)
  3. Continue anyway, and, upon success, permanently register that this external dependency is present, but not detectable
  4. Abort the installation

[1/2/3/4] 1

+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "liblapacke-dev" "libopenblas-dev" "pkg-config" "zlib1g-dev"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libblas3:amd64.
- (Reading database ... 18745 files and directories currently installed.)
- Preparing to unpack .../00-libblas3_3.11.0-2_amd64.deb ...
- Unpacking libblas3:amd64 (3.11.0-2) ...
- Selecting previously unselected package libblas-dev:amd64.
- Preparing to unpack .../01-libblas-dev_3.11.0-2_amd64.deb ...
- Unpacking libblas-dev:amd64 (3.11.0-2) ...
- Selecting previously unselected package libgfortran5:amd64.
- Preparing to unpack .../02-libgfortran5_12.2.0-14_amd64.deb ...
- Unpacking libgfortran5:amd64 (12.2.0-14) ...
- Selecting previously unselected package libopenblas0-pthread:amd64.
- Preparing to unpack .../03-libopenblas0-pthread_0.3.21+ds-4_amd64.deb ...
- Unpacking libopenblas0-pthread:amd64 (0.3.21+ds-4) ...
- Selecting previously unselected package liblapack3:amd64.
- Preparing to unpack .../04-liblapack3_3.11.0-2_amd64.deb ...
- Unpacking liblapack3:amd64 (3.11.0-2) ...
- Selecting previously unselected package libopenblas-pthread-dev:amd64.
- Preparing to unpack .../05-libopenblas-pthread-dev_0.3.21+ds-4_amd64.deb ...
- Unpacking libopenblas-pthread-dev:amd64 (0.3.21+ds-4) ...
- Selecting previously unselected package liblapack-dev:amd64.
- Preparing to unpack .../06-liblapack-dev_3.11.0-2_amd64.deb ...
- Unpacking liblapack-dev:amd64 (3.11.0-2) ...
- Selecting previously unselected package libtmglib3:amd64.
- Preparing to unpack .../07-libtmglib3_3.11.0-2_amd64.deb ...
- Unpacking libtmglib3:amd64 (3.11.0-2) ...
- Selecting previously unselected package liblapacke:amd64.
- Preparing to unpack .../08-liblapacke_3.11.0-2_amd64.deb ...
- Unpacking liblapacke:amd64 (3.11.0-2) ...
- Selecting previously unselected package libtmglib-dev:amd64.
- Preparing to unpack .../09-libtmglib-dev_3.11.0-2_amd64.deb ...
- Unpacking libtmglib-dev:amd64 (3.11.0-2) ...
- Selecting previously unselected package liblapacke-dev:amd64.
- Preparing to unpack .../10-liblapacke-dev_3.11.0-2_amd64.deb ...
- Unpacking liblapacke-dev:amd64 (3.11.0-2) ...
- Selecting previously unselected package libopenblas0:amd64.
- Preparing to unpack .../11-libopenblas0_0.3.21+ds-4_amd64.deb ...
- Unpacking libopenblas0:amd64 (0.3.21+ds-4) ...
- Selecting previously unselected package libopenblas-dev:amd64.
- Preparing to unpack .../12-libopenblas-dev_0.3.21+ds-4_amd64.deb ...
- Unpacking libopenblas-dev:amd64 (0.3.21+ds-4) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../13-libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../14-pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../15-pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../16-pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Selecting previously unselected package zlib1g-dev:amd64.
- Preparing to unpack .../17-zlib1g-dev_1%3a1.2.13.dfsg-1_amd64.deb ...
- Unpacking zlib1g-dev:amd64 (1:1.2.13.dfsg-1) ...
- Setting up libblas3:amd64 (3.11.0-2) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/blas/libblas.so.3 to provide /usr/lib/x86_64-linux-gnu/libblas.so.3 (libblas.so.3-x86_64-linux-gnu) in auto mode
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up libgfortran5:amd64 (12.2.0-14) ...
- Setting up zlib1g-dev:amd64 (1:1.2.13.dfsg-1) ...
- Setting up libblas-dev:amd64 (3.11.0-2) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/blas/libblas.so to provide /usr/lib/x86_64-linux-gnu/libblas.so (libblas.so-x86_64-linux-gnu) in auto mode
- Setting up liblapack3:amd64 (3.11.0-2) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3 to provide /usr/lib/x86_64-linux-gnu/liblapack.so.3 (liblapack.so.3-x86_64-linux-gnu) in auto mode
- Setting up libopenblas0-pthread:amd64 (0.3.21+ds-4) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 to provide /usr/lib/x86_64-linux-gnu/libblas.so.3 (libblas.so.3-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/liblapack.so.3 to provide /usr/lib/x86_64-linux-gnu/liblapack.so.3 (liblapack.so.3-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblas.so.0 to provide /usr/lib/x86_64-linux-gnu/libopenblas.so.0 (libopenblas.so.0-x86_64-linux-gnu) in auto mode
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up libtmglib3:amd64 (3.11.0-2) ...
- Setting up liblapack-dev:amd64 (3.11.0-2) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/lapack/liblapack.so to provide /usr/lib/x86_64-linux-gnu/liblapack.so (liblapack.so-x86_64-linux-gnu) in auto mode
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Setting up libopenblas0:amd64 (0.3.21+ds-4) ...
- Setting up liblapacke:amd64 (3.11.0-2) ...
- Setting up libtmglib-dev:amd64 (3.11.0-2) ...
- Setting up libopenblas-pthread-dev:amd64 (0.3.21+ds-4) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so to provide /usr/lib/x86_64-linux-gnu/libblas.so (libblas.so-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/liblapack.so to provide /usr/lib/x86_64-linux-gnu/liblapack.so (liblapack.so-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblas.so to provide /usr/lib/x86_64-linux-gnu/libopenblas.so (libopenblas.so-x86_64-linux-gnu) in auto mode
- Setting up liblapacke-dev:amd64 (3.11.0-2) ...
- Setting up libopenblas-dev:amd64 (0.3.21+ds-4) ...
- Processing triggers for libc-bin (2.36-9+deb12u9) ...

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved base.v0.16.3 (cached)
-> retrieved bigarray-compat.1.1.0 (cached)
-> retrieved camlzip.1.13 (cached)
-> retrieved conf-openblas.0.2.2 (cached)
-> retrieved csexp.1.5.2 (cached)
-> retrieved ctypes.0.23.0 (cached)
-> installed conf-bash.1
-> installed conf-pkg-config.4
-> installed conf-zlib.1
-> installed conf-openblas.0.2.2
-> installed bigarray-compat.1.1.0
-> installed csexp.1.5.2
-> retrieved dune-configurator.3.17.2 (cached)
-> retrieved integers.0.7.0 (cached)
-> retrieved npy.0.0.9 (cached)
-> retrieved ocaml-compiler-libs.v0.12.4 (cached)
-> retrieved ocamlfind.1.9.8 (cached)
-> installed ocaml-compiler-libs.v0.12.4
-> retrieved owl.1.2, owl-base.1.2 (cached)
-> installed dune-configurator.3.17.2
-> retrieved owl-opt.0.0.1, ppx-owl-opt.0.0.1 (cached)
-> retrieved ppx_derivers.1.2.1 (cached)
-> retrieved ppxlib.0.35.0 (cached)
-> retrieved sexplib0.v0.16.0 (cached)
-> retrieved stdlib-shims.0.3.0 (cached)
-> installed ppx_derivers.1.2.1
-> installed stdlib-shims.0.3.0
-> installed sexplib0.v0.16.0
-> installed integers.0.7.0
-> installed ocamlfind.1.9.8
-> installed camlzip.1.13
-> installed owl-base.1.2
-> installed npy.0.0.9
-> installed base.v0.16.3
-> installed ctypes.0.23.0
-> installed ppxlib.0.35.0
-> installed ppx-owl-opt.0.0.1
-> installed owl.1.2
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2025-02-24 09:17.50 ---> saved as "5ac671c42a2a856db40959c53eb131d24d84b9b1f8f7bcc55f15bdb11f21a226"

/home/opam: (run (network host) (shell "(opam reinstall --with-test owl-opt.0.0.1) || true"))
The following actions will be performed:
=== recompile 1 package
  - recompile owl-opt 0.0.1

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved owl-opt.0.0.1 (https://github.com/owlbarn/owl_opt/archive/v0.0.1.tar.gz)
-> removed owl-opt.0.0.1
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2025-02-24 09:18.02 ---> saved as "44ff36ea687953f6ed8dfeb7a58555c8e18eb5cd8cd357dc693475559cb37273"

/home/opam: (run (shell "opam reinstall --with-test --verbose owl-opt.0.0.1;\ \n res=$?;\ \n test \"$res\" != 31 && exit \"$res\";\ \n export OPAMCLI=2.0;\ \n build_dir=$(opam var prefix)/.opam-switch/build;\ \n failed=$(ls \"$build_dir\");\ \n partial_fails=\"\";\ \n for pkg in $failed; do\ \n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-12\\\"\"; then\ \n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\ \n fi;\ \n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\ \n done;\ \n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\ \n exit 1"))
The following actions will be performed:
=== recompile 1 package
  - recompile owl-opt 0.0.1

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
Processing 1/4: [owl-opt.0.0.1: extract]
-> retrieved owl-opt.0.0.1 (cached)
Processing 2/4: [owl-opt: dune build]
+ /home/opam/.opam/opam-init/hooks/sandbox.sh "build" "dune" "build" "-p" "owl-opt" "-j" "255" (CWD=/home/opam/.opam/4.14/.opam-switch/build/owl-opt.0.0.1)
- File "dune-project", line 2, characters 11-14:
- 2 | (using fmt 1.1)
-                ^^^
- Warning: Version 1.1 of integration with automatic formatters is not
- supported until version 1.7 of the dune language.
- Supported versions of this extension in version 1.5 of the dune language:
- - 1.0
Processing 2/4: [owl-opt: dune runtest]
+ /home/opam/.opam/opam-init/hooks/sandbox.sh "build" "dune" "runtest" "examples/opt" "-p" "owl-opt" "-j" "255" (CWD=/home/opam/.opam/4.14/.opam-switch/build/owl-opt.0.0.1)
- File "dune-project", line 2, characters 11-14:
- 2 | (using fmt 1.1)
-                ^^^
- Warning: Version 1.1 of integration with automatic formatters is not
- supported until version 1.7 of the dune language.
- Supported versions of this extension in version 1.5 of the dune language:
- - 1.0
- (cd _build/default/examples/opt && ./gd.exe)
- iter: 0 | loss: 4.456602
- iter: 1 | loss: 4.456602
- iter: 2 | loss: 4.456300
- iter: 3 | loss: 4.455999
[... gd.exe prints one such line per iteration; the loss decreases steadily by roughly 0.0003 per step ...]
- iter: 1428 | loss: 4.026348
- iter: 1429 | loss: 4.026046
iter: 1430 | loss: 4.025745 iter: 1431 | loss: 4.025443 iter: 1432 | loss: 4.025142 iter: 1433 | loss: 4.024840 iter: 1434 | loss: 4.024539 iter: 1435 | loss: 4.024237 iter: 1436 | loss: 4.023936 iter: 1437 | loss: 4.023634 iter: 1438 | loss: 4.023333 iter: 1439 | loss: 4.023031 iter: 1440 | loss: 4.022730 iter: 1441 | loss: 4.022428 iter: 1442 | loss: 4.022127 iter: 1443 | loss: 4.021825 iter: 1444 | loss: 4.021524 iter: 1445 | loss: 4.021222 iter: 1446 | loss: 4.020920 iter: 1447 | loss: 4.020619 iter: 1448 | loss: 4.020317 iter: 1449 | loss: 4.020016 iter: 1450 | loss: 4.019714 iter: 1451 | loss: 4.019413 iter: 1452 | loss: 4.019111 iter: 1453 | loss: 4.018810 iter: 1454 | loss: 4.018508 iter: 1455 | loss: 4.018207 iter: 1456 | loss: 4.017905 iter: 1457 | loss: 4.017604 iter: 1458 | loss: 4.017302 iter: 1459 | loss: 4.017001 iter: 1460 | loss: 4.016699 iter: 1461 | loss: 4.016398 iter: 1462 | loss: 4.016096 iter: 1463 | loss: 4.015795 iter: 1464 | loss: 4.015493 iter: 1465 | loss: 4.015192 iter: 1466 | loss: 4.014890 iter: 1467 | loss: 4.014589 iter: 1468 | loss: 4.014287 iter: 1469 | loss: 4.013986 iter: 1470 | loss: 4.013684 iter: 1471 | loss: 4.013383 iter: 1472 | loss: 4.013081 iter: 1473 | loss: 4.012780 iter: 1474 | loss: 4.012478 iter: 1475 | loss: 4.012177 iter: 1476 | loss: 4.011875 iter: 1477 | loss: 4.011574 iter: 1478 | loss: 4.011272 iter: 1479 | loss: 4.010971 iter: 1480 | loss: 4.010669 iter: 1481 | loss: 4.010368 iter: 1482 | loss: 4.010066 iter: 1483 | loss: 4.009765 iter: 1484 | loss: 4.009463 iter: 1485 | loss: 4.009162 iter: 1486 | loss: 4.008860 iter: 1487 | loss: 4.008559 iter: 1488 | loss: 4.008257 iter: 1489 | loss: 4.007956 iter: 1490 | loss: 4.007654 iter: 1491 | loss: 4.007353 iter: 1492 | loss: 4.007051 iter: 1493 | loss: 4.006750 iter: 1494 | loss: 4.006448 iter: 1495 | loss: 4.006147 iter: 1496 | loss: 4.005845 iter: 1497 | loss: 4.005544 iter: 1498 | loss: 4.005242 iter: 1499 | loss: 4.004940 iter: 1500 | loss: 4.004639 iter: 1501 | loss: 4.004337 iter: 1502 | loss: 4.004036 iter: 1503 | loss: 4.003734 iter: 1504 | loss: 4.003433 iter: 1505 | loss: 4.003131 iter: 1506 | loss: 4.002830 iter: 1507 | loss: 4.002528 iter: 1508 | loss: 4.002227 iter: 1509 | loss: 4.001925 iter: 1510 | loss: 4.001624 iter: 1511 | loss: 4.001322 iter: 1512 | loss: 4.001021 iter: 1513 | loss: 4.000719 iter: 1514 | loss: 4.000418 iter: 1515 | loss: 4.000116 iter: 1516 | loss: 3.999815 iter: 1517 | loss: 3.999513 iter: 1518 | loss: 3.999212 iter: 1519 | loss: 3.998910 iter: 1520 | loss: 3.998609 iter: 1521 | loss: 3.998307 iter: 1522 | loss: 3.998006 iter: 1523 | loss: 3.997704 iter: 1524 | loss: 3.997403 iter: 1525 | loss: 3.997101 iter: 1526 | loss: 3.996800 iter: 1527 | loss: 3.996498 iter: 1528 | loss: 3.996197 iter: 1529 | loss: 3.995895 iter: 1530 | loss: 3.995594 iter: 1531 | loss: 3.995292 iter: 1532 | loss: 3.994991 iter: 1533 | loss: 3.994689 iter: 1534 | loss: 3.994388 iter: 1535 | loss: 3.994086 iter: 1536 | loss: 3.993785 iter: 1537 | loss: 3.993483 iter: 1538 | loss: 3.993182 iter: 1539 | loss: 3.992880 iter: 1540 | loss: 3.992579 iter: 1541 | loss: 3.992277 iter: 1542 | loss: 3.991976 iter: 1543 | loss: 3.991674 iter: 1544 | loss: 3.991373 iter: 1545 | loss: 3.991071 iter: 1546 | loss: 3.990770 iter: 1547 | loss: 3.990468 iter: 1548 | loss: 3.990167 iter: 1549 | loss: 3.989865 iter: 1550 | loss: 3.989563 iter: 1551 | loss: 3.989262 iter: 1552 | loss: 3.988960 iter: 1553 | loss: 3.988659 iter: 1554 | loss: 3.988357 iter: 1555 | loss: 3.988056 iter: 1556 | loss: 
3.987754 iter: 1557 | loss: 3.987453 iter: 1558 | loss: 3.987151 iter: 1559 | loss: 3.986850 iter: 1560 | loss: 3.986548 iter: 1561 | loss: 3.986247 iter: 1562 | loss: 3.985945 iter: 1563 | loss: 3.985644 iter: 1564 | loss: 3.985342 iter: 1565 | loss: 3.985041 iter: 1566 | loss: 3.984739 iter: 1567 | loss: 3.984438 iter: 1568 | loss: 3.984136 iter: 1569 | loss: 3.983835 iter: 1570 | loss: 3.983533 iter: 1571 | loss: 3.983232 iter: 1572 | loss: 3.982930 iter: 1573 | loss: 3.982629 iter: 1574 | loss: 3.982327 iter: 1575 | loss: 3.982026 iter: 1576 | loss: 3.981724 iter: 1577 | loss: 3.981423 iter: 1578 | loss: 3.981121 iter: 1579 | loss: 3.980820 iter: 1580 | loss: 3.980518 iter: 1581 | loss: 3.980217 iter: 1582 | loss: 3.979915 iter: 1583 | loss: 3.979614 iter: 1584 | loss: 3.979312 iter: 1585 | loss: 3.979011 iter: 1586 | loss: 3.978709 iter: 1587 | loss: 3.978408 iter: 1588 | loss: 3.978106 iter: 1589 | loss: 3.977805 iter: 1590 | loss: 3.977503 iter: 1591 | loss: 3.977202 iter: 1592 | loss: 3.976900 iter: 1593 | loss: 3.976599 iter: 1594 | loss: 3.976297 iter: 1595 | loss: 3.975996 iter: 1596 | loss: 3.975694 iter: 1597 | loss: 3.975393 iter: 1598 | loss: 3.975091 iter: 1599 | loss: 3.974790 iter: 1600 | loss: 3.974488 iter: 1601 | loss: 3.974187 iter: 1602 | loss: 3.973885 iter: 1603 | loss: 3.973583 iter: 1604 | loss: 3.973282 iter: 1605 | loss: 3.972980 iter: 1606 | loss: 3.972679 iter: 1607 | loss: 3.972377 iter: 1608 | loss: 3.972076 iter: 1609 | loss: 3.971774 iter: 1610 | loss: 3.971473 iter: 1611 | loss: 3.971171 iter: 1612 | loss: 3.970870 iter: 1613 | loss: 3.970568 iter: 1614 | loss: 3.970267 iter: 1615 | loss: 3.969965 iter: 1616 | loss: 3.969664 iter: 1617 | loss: 3.969362 iter: 1618 | loss: 3.969061 iter: 1619 | loss: 3.968759 iter: 1620 | loss: 3.968458 iter: 1621 | loss: 3.968156 iter: 1622 | loss: 3.967855 iter: 1623 | loss: 3.967553 iter: 1624 | loss: 3.967252 iter: 1625 | loss: 3.966950 iter: 1626 | loss: 3.966649 iter: 1627 | loss: 3.966347 iter: 1628 | loss: 3.966046 iter: 1629 | loss: 3.965744 iter: 1630 | loss: 3.965443 iter: 1631 | loss: 3.965141 iter: 1632 | loss: 3.964840 iter: 1633 | loss: 3.964538 iter: 1634 | loss: 3.964237 iter: 1635 | loss: 3.963935 iter: 1636 | loss: 3.963634 iter: 1637 | loss: 3.963332 iter: 1638 | loss: 3.963031 iter: 1639 | loss: 3.962729 iter: 1640 | loss: 3.962428 iter: 1641 | loss: 3.962126 iter: 1642 | loss: 3.961825 iter: 1643 | loss: 3.961523 iter: 1644 | loss: 3.961222 iter: 1645 | loss: 3.960920 iter: 1646 | loss: 3.960619 iter: 1647 | loss: 3.960317 iter: 1648 | loss: 3.960016 iter: 1649 | loss: 3.959714 iter: 1650 | loss: 3.959413 iter: 1651 | loss: 3.959111 iter: 1652 | loss: 3.958810 iter: 1653 | loss: 3.958508 iter: 1654 | loss: 3.958206 iter: 1655 | loss: 3.957905 iter: 1656 | loss: 3.957603 iter: 1657 | loss: 3.957302 iter: 1658 | loss: 3.957000 iter: 1659 | loss: 3.956699 iter: 1660 | loss: 3.956397 iter: 1661 | loss: 3.956096 iter: 1662 | loss: 3.955794 iter: 1663 | loss: 3.955493 iter: 1664 | loss: 3.955191 iter: 1665 | loss: 3.954890 iter: 1666 | loss: 3.954588 iter: 1667 | loss: 3.954287 iter: 1668 | loss: 3.953985 iter: 1669 | loss: 3.953684 iter: 1670 | loss: 3.953382 iter: 1671 | loss: 3.953081 iter: 1672 | loss: 3.952779 iter: 1673 | loss: 3.952478 iter: 1674 | loss: 3.952176 iter: 1675 | loss: 3.951875 iter: 1676 | loss: 3.951573 iter: 1677 | loss: 3.951272 iter: 1678 | loss: 3.950970 iter: 1679 | loss: 3.950669 iter: 1680 | loss: 3.950367 iter: 1681 | loss: 3.950066 iter: 1682 | loss: 3.949764 iter: 1683 | 
loss: 3.949463 iter: 1684 | loss: 3.949161 iter: 1685 | loss: 3.948860 iter: 1686 | loss: 3.948558 iter: 1687 | loss: 3.948257 iter: 1688 | loss: 3.947955 iter: 1689 | loss: 3.947654 iter: 1690 | loss: 3.947352 iter: 1691 | loss: 3.947051 iter: 1692 | loss: 3.946749 iter: 1693 | loss: 3.946448 iter: 1694 | loss: 3.946146 iter: 1695 | loss: 3.945845 iter: 1696 | loss: 3.945543 iter: 1697 | loss: 3.945242 iter: 1698 | loss: 3.944940 iter: 1699 | loss: 3.944639 iter: 1700 | loss: 3.944337 iter: 1701 | loss: 3.944036 iter: 1702 | loss: 3.943734 iter: 1703 | loss: 3.943433 iter: 1704 | loss: 3.943131 iter: 1705 | loss: 3.942830 iter: 1706 | loss: 3.942528 iter: 1707 | loss: 3.942226 iter: 1708 | loss: 3.941925 iter: 1709 | loss: 3.941623 iter: 1710 | loss: 3.941322 iter: 1711 | loss: 3.941020 iter: 1712 | loss: 3.940719 iter: 1713 | loss: 3.940417 iter: 1714 | loss: 3.940116 iter: 1715 | loss: 3.939814 iter: 1716 | loss: 3.939513 iter: 1717 | loss: 3.939211 iter: 1718 | loss: 3.938910 iter: 1719 | loss: 3.938608 iter: 1720 | loss: 3.938307 iter: 1721 | loss: 3.938005 iter: 1722 | loss: 3.937704 iter: 1723 | loss: 3.937402 iter: 1724 | loss: 3.937101 iter: 1725 | loss: 3.936799 iter: 1726 | loss: 3.936498 iter: 1727 | loss: 3.936196 iter: 1728 | loss: 3.935895 iter: 1729 | loss: 3.935593 iter: 1730 | loss: 3.935292 iter: 1731 | loss: 3.934990 iter: 1732 | loss: 3.934689 iter: 1733 | loss: 3.934387 iter: 1734 | loss: 3.934086 iter: 1735 | loss: 3.933784 iter: 1736 | loss: 3.933483 iter: 1737 | loss: 3.933181 iter: 1738 | loss: 3.932880 iter: 1739 | loss: 3.932578 iter: 1740 | loss: 3.932277 iter: 1741 | loss: 3.931975 iter: 1742 | loss: 3.931674 iter: 1743 | loss: 3.931372 iter: 1744 | loss: 3.931071 iter: 1745 | loss: 3.930769 iter: 1746 | loss: 3.930468 iter: 1747 | loss: 3.930166 iter: 1748 | loss: 3.929865 iter: 1749 | loss: 3.929563 iter: 1750 | loss: 3.929262 iter: 1751 | loss: 3.928960 iter: 1752 | loss: 3.928659 iter: 1753 | loss: 3.928357 iter: 1754 | loss: 3.928056 iter: 1755 | loss: 3.927754 iter: 1756 | loss: 3.927453 iter: 1757 | loss: 3.927151 iter: 1758 | loss: 3.926849 iter: 1759 | loss: 3.926548 iter: 1760 | loss: 3.926246 iter: 1761 | loss: 3.925945 iter: 1762 | loss: 3.925643 iter: 1763 | loss: 3.925342 iter: 1764 | loss: 3.925040 iter: 1765 | loss: 3.924739 iter: 1766 | loss: 3.924437 iter: 1767 | loss: 3.924136 iter: 1768 | loss: 3.923834 iter: 1769 | loss: 3.923533 iter: 1770 | loss: 3.923231 iter: 1771 | loss: 3.922930 iter: 1772 | loss: 3.922628 iter: 1773 | loss: 3.922327 iter: 1774 | loss: 3.922025 iter: 1775 | loss: 3.921724 iter: 1776 | loss: 3.921422 iter: 1777 | loss: 3.921121 iter: 1778 | loss: 3.920819 iter: 1779 | loss: 3.920518 iter: 1780 | loss: 3.920216 iter: 1781 | loss: 3.919915 iter: 1782 | loss: 3.919613 iter: 1783 | loss: 3.919312 iter: 1784 | loss: 3.919010 iter: 1785 | loss: 3.918709 iter: 1786 | loss: 3.918407 iter: 1787 | loss: 3.918106 iter: 1788 | loss: 3.917804 iter: 1789 | loss: 3.917503 iter: 1790 | loss: 3.917201 iter: 1791 | loss: 3.916900 iter: 1792 | loss: 3.916598 iter: 1793 | loss: 3.916297 iter: 1794 | loss: 3.915995 iter: 1795 | loss: 3.915694 iter: 1796 | loss: 3.915392 iter: 1797 | loss: 3.915091 iter: 1798 | loss: 3.914789 iter: 1799 | loss: 3.914488 iter: 1800 | loss: 3.914186 iter: 1801 | loss: 3.913885 iter: 1802 | loss: 3.913583 iter: 1803 | loss: 3.913282 iter: 1804 | loss: 3.912980 iter: 1805 | loss: 3.912679 iter: 1806 | loss: 3.912377 iter: 1807 | loss: 3.912076 iter: 1808 | loss: 3.911774 iter: 1809 | loss: 3.911473 iter: 1810 
| loss: 3.911171 iter: 1811 | loss: 3.910869 iter: 1812 | loss: 3.910568 iter: 1813 | loss: 3.910266 iter: 1814 | loss: 3.909965 iter: 1815 | loss: 3.909663 iter: 1816 | loss: 3.909362 iter: 1817 | loss: 3.909060 iter: 1818 | loss: 3.908759 iter: 1819 | loss: 3.908457 iter: 1820 | loss: 3.908156 iter: 1821 | loss: 3.907854 iter: 1822 | loss: 3.907553 iter: 1823 | loss: 3.907251 iter: 1824 | loss: 3.906950 iter: 1825 | loss: 3.906648 iter: 1826 | loss: 3.906347 iter: 1827 | loss: 3.906045 iter: 1828 | loss: 3.905744 iter: 1829 | loss: 3.905442 iter: 1830 | loss: 3.905141 iter: 1831 | loss: 3.904839 iter: 1832 | loss: 3.904538 iter: 1833 | loss: 3.904236 iter: 1834 | loss: 3.903935 iter: 1835 | loss: 3.903633 iter: 1836 | loss: 3.903332 iter: 1837 | loss: 3.903030 iter: 1838 | loss: 3.902729 iter: 1839 | loss: 3.902427 iter: 1840 | loss: 3.902126 iter: 1841 | loss: 3.901824 iter: 1842 | loss: 3.901523 iter: 1843 | loss: 3.901221 iter: 1844 | loss: 3.900920 iter: 1845 | loss: 3.900618 iter: 1846 | loss: 3.900317 iter: 1847 | loss: 3.900015 iter: 1848 | loss: 3.899714 iter: 1849 | loss: 3.899412 iter: 1850 | loss: 3.899111 iter: 1851 | loss: 3.898809 iter: 1852 | loss: 3.898508 iter: 1853 | loss: 3.898206 iter: 1854 | loss: 3.897905 iter: 1855 | loss: 3.897603 iter: 1856 | loss: 3.897302 iter: 1857 | loss: 3.897000 iter: 1858 | loss: 3.896699 iter: 1859 | loss: 3.896397 iter: 1860 | loss: 3.896096 iter: 1861 | loss: 3.895794 iter: 1862 | loss: 3.895493 iter: 1863 | loss: 3.895191 iter: 1864 | loss: 3.894889 iter: 1865 | loss: 3.894588 iter: 1866 | loss: 3.894286 iter: 1867 | loss: 3.893985 iter: 1868 | loss: 3.893683 iter: 1869 | loss: 3.893382 iter: 1870 | loss: 3.893080 iter: 1871 | loss: 3.892779 iter: 1872 | loss: 3.892477 iter: 1873 | loss: 3.892176 iter: 1874 | loss: 3.891874 iter: 1875 | loss: 3.891573 iter: 1876 | loss: 3.891271 iter: 1877 | loss: 3.890970 iter: 1878 | loss: 3.890668 iter: 1879 | loss: 3.890367 iter: 1880 | loss: 3.890065 iter: 1881 | loss: 3.889764 iter: 1882 | loss: 3.889462 iter: 1883 | loss: 3.889161 iter: 1884 | loss: 3.888859 iter: 1885 | loss: 3.888558 iter: 1886 | loss: 3.888256 iter: 1887 | loss: 3.887955 iter: 1888 | loss: 3.887653 iter: 1889 | loss: 3.887352 iter: 1890 | loss: 3.887050 iter: 1891 | loss: 3.886749 iter: 1892 | loss: 3.886447 iter: 1893 | loss: 3.886146 iter: 1894 | loss: 3.885844 iter: 1895 | loss: 3.885543 iter: 1896 | loss: 3.885241 iter: 1897 | loss: 3.884940 iter: 1898 | loss: 3.884638 iter: 1899 | loss: 3.884337 iter: 1900 | loss: 3.884035 iter: 1901 | loss: 3.883734 iter: 1902 | loss: 3.883432 iter: 1903 | loss: 3.883131 iter: 1904 | loss: 3.882829 iter: 1905 | loss: 3.882528 iter: 1906 | loss: 3.882226 iter: 1907 | loss: 3.881925 iter: 1908 | loss: 3.881623 iter: 1909 | loss: 3.881322 iter: 1910 | loss: 3.881020 iter: 1911 | loss: 3.880719 iter: 1912 | loss: 3.880417 iter: 1913 | loss: 3.880116 iter: 1914 | loss: 3.879814 iter: 1915 | loss: 3.879512 iter: 1916 | loss: 3.879211 iter: 1917 | loss: 3.878909 iter: 1918 | loss: 3.878608 iter: 1919 | loss: 3.878306 iter: 1920 | loss: 3.878005 iter: 1921 | loss: 3.877703 iter: 1922 | loss: 3.877402 iter: 1923 | loss: 3.877100 iter: 1924 | loss: 3.876799 iter: 1925 | loss: 3.876497 iter: 1926 | loss: 3.876196 iter: 1927 | loss: 3.875894 iter: 1928 | loss: 3.875593 iter: 1929 | loss: 3.875291 iter: 1930 | loss: 3.874990 iter: 1931 | loss: 3.874688 iter: 1932 | loss: 3.874387 iter: 1933 | loss: 3.874085 iter: 1934 | loss: 3.873784 iter: 1935 | loss: 3.873482 iter: 1936 | loss: 3.873181 iter: 
1937 | loss: 3.872879 iter: 1938 | loss: 3.872578 iter: 1939 | loss: 3.872276 iter: 1940 | loss: 3.871975 iter: 1941 | loss: 3.871673 iter: 1942 | loss: 3.871372 iter: 1943 | loss: 3.871070 iter: 1944 | loss: 3.870769 iter: 1945 | loss: 3.870467 iter: 1946 | loss: 3.870166 iter: 1947 | loss: 3.869864 iter: 1948 | loss: 3.869563 iter: 1949 | loss: 3.869261 iter: 1950 | loss: 3.868960 iter: 1951 | loss: 3.868658 iter: 1952 | loss: 3.868357 iter: 1953 | loss: 3.868055 iter: 1954 | loss: 3.867754 iter: 1955 | loss: 3.867452 iter: 1956 | loss: 3.867151 iter: 1957 | loss: 3.866849 iter: 1958 | loss: 3.866548 iter: 1959 | loss: 3.866246 iter: 1960 | loss: 3.865945 iter: 1961 | loss: 3.865643 iter: 1962 | loss: 3.865342 iter: 1963 | loss: 3.865040 iter: 1964 | loss: 3.864739 iter: 1965 | loss: 3.864437 iter: 1966 | loss: 3.864136 iter: 1967 | loss: 3.863834 iter: 1968 | loss: 3.863532 iter: 1969 | loss: 3.863231 iter: 1970 | loss: 3.862929 iter: 1971 | loss: 3.862628 iter: 1972 | loss: 3.862326 iter: 1973 | loss: 3.862025 iter: 1974 | loss: 3.861723 iter: 1975 | loss: 3.861422 iter: 1976 | loss: 3.861120 iter: 1977 | loss: 3.860819 iter: 1978 | loss: 3.860517 iter: 1979 | loss: 3.860216 iter: 1980 | loss: 3.859914 iter: 1981 | loss: 3.859613 iter: 1982 | loss: 3.859311 iter: 1983 | loss: 3.859010 iter: 1984 | loss: 3.858708 iter: 1985 | loss: 3.858407 iter: 1986 | loss: 3.858105 iter: 1987 | loss: 3.857804 iter: 1988 | loss: 3.857502 iter: 1989 | loss: 3.857201 iter: 1990 | loss: 3.856899 iter: 1991 | loss: 3.856598 iter: 1992 | loss: 3.856296 iter: 1993 | loss: 3.855995 iter: 1994 | loss: 3.855693 iter: 1995 | loss: 3.855392 iter: 1996 | loss: 3.855090 iter: 1997 | loss: 3.854789 iter: 1998 | loss: 3.854487 iter: 1999 | loss: 3.854186 iter: 2000 | loss: 3.853884 iter: 2001 | loss: 3.853583 iter: 2002 | loss: 3.853281 iter: 2003 | loss: 3.852980 iter: 2004 | loss: 3.852678 iter: 2005 | loss: 3.852377 iter: 2006 | loss: 3.852075 iter: 2007 | loss: 3.851774 iter: 2008 | loss: 3.851472 iter: 2009 | loss: 3.851171 iter: 2010 | loss: 3.850869 iter: 2011 | loss: 3.850568 iter: 2012 | loss: 3.850266 iter: 2013 | loss: 3.849965 iter: 2014 | loss: 3.849663 iter: 2015 | loss: 3.849362 iter: 2016 | loss: 3.849060 iter: 2017 | loss: 3.848759 iter: 2018 | loss: 3.848457 iter: 2019 | loss: 3.848155 iter: 2020 | loss: 3.847854 iter: 2021 | loss: 3.847552 iter: 2022 | loss: 3.847251 iter: 2023 | loss: 3.846949 iter: 2024 | loss: 3.846648 iter: 2025 | loss: 3.846346 iter: 2026 | loss: 3.846045 iter: 2027 | loss: 3.845743 iter: 2028 | loss: 3.845442 iter: 2029 | loss: 3.845140 iter: 2030 | loss: 3.844839 iter: 2031 | loss: 3.844537 iter: 2032 | loss: 3.844236 iter: 2033 | loss: 3.843934 iter: 2034 | loss: 3.843633 iter: 2035 | loss: 3.843331 iter: 2036 | loss: 3.843030 iter: 2037 | loss: 3.842728 iter: 2038 | loss: 3.842427 iter: 2039 | loss: 3.842125 iter: 2040 | loss: 3.841824 iter: 2041 | loss: 3.841522 iter: 2042 | loss: 3.841221 iter: 2043 | loss: 3.840919 iter: 2044 | loss: 3.840618 iter: 2045 | loss: 3.840316 iter: 2046 | loss: 3.840015 iter: 2047 | loss: 3.839713 iter: 2048 | loss: 3.839412 iter: 2049 | loss: 3.839110 iter: 2050 | loss: 3.838809 iter: 2051 | loss: 3.838507 iter: 2052 | loss: 3.838206 iter: 2053 | loss: 3.837904 iter: 2054 | loss: 3.837603 iter: 2055 | loss: 3.837301 iter: 2056 | loss: 3.837000 iter: 2057 | loss: 3.836698 iter: 2058 | loss: 3.836397 iter: 2059 | loss: 3.836095 iter: 2060 | loss: 3.835794 iter: 2061 | loss: 3.835492 iter: 2062 | loss: 3.835191 iter: 2063 | loss: 3.834889 
iter: 2064 | loss: 3.834588 iter: 2065 | loss: 3.834286 iter: 2066 | loss: 3.833985 iter: 2067 | loss: 3.833683 iter: 2068 | loss: 3.833382 iter: 2069 | loss: 3.833080 iter: 2070 | loss: 3.832779 iter: 2071 | loss: 3.832477 iter: 2072 | loss: 3.832175 iter: 2073 | loss: 3.831874 iter: 2074 | loss: 3.831572 iter: 2075 | loss: 3.831271 iter: 2076 | loss: 3.830969 iter: 2077 | loss: 3.830668 iter: 2078 | loss: 3.830366 iter: 2079 | loss: 3.830065 iter: 2080 | loss: 3.829763 iter: 2081 | loss: 3.829462 iter: 2082 | loss: 3.829160 iter: 2083 | loss: 3.828859 iter: 2084 | loss: 3.828557 iter: 2085 | loss: 3.828256 iter: 2086 | loss: 3.827954 iter: 2087 | loss: 3.827653 iter: 2088 | loss: 3.827351 iter: 2089 | loss: 3.827050 iter: 2090 | loss: 3.826748 iter: 2091 | loss: 3.826447 iter: 2092 | loss: 3.826145 iter: 2093 | loss: 3.825844 iter: 2094 | loss: 3.825542 iter: 2095 | loss: 3.825241 iter: 2096 | loss: 3.824939 iter: 2097 | loss: 3.824638 iter: 2098 | loss: 3.824336 iter: 2099 | loss: 3.824035 iter: 2100 | loss: 3.823733 iter: 2101 | loss: 3.823432 iter: 2102 | loss: 3.823130 iter: 2103 | loss: 3.822829 iter: 2104 | loss: 3.822527 iter: 2105 | loss: 3.822226 iter: 2106 | loss: 3.821924 iter: 2107 | loss: 3.821623 iter: 2108 | loss: 3.821321 iter: 2109 | loss: 3.821020 iter: 2110 | loss: 3.820718 iter: 2111 | loss: 3.820417 iter: 2112 | loss: 3.820115 iter: 2113 | loss: 3.819814 iter: 2114 | loss: 3.819512 iter: 2115 | loss: 3.819211 iter: 2116 | loss: 3.818909 iter: 2117 | loss: 3.818608 iter: 2118 | loss: 3.818306 iter: 2119 | loss: 3.818005 iter: 2120 | loss: 3.817703 iter: 2121 | loss: 3.817402 iter: 2122 | loss: 3.817100 iter: 2123 | loss: 3.816798 iter: 2124 | loss: 3.816497 iter: 2125 | loss: 3.816195 iter: 2126 | loss: 3.815894 iter: 2127 | loss: 3.815592 iter: 2128 | loss: 3.815291 iter: 2129 | loss: 3.814989 iter: 2130 | loss: 3.814688 iter: 2131 | loss: 3.814386 iter: 2132 | loss: 3.814085 iter: 2133 | loss: 3.813783 iter: 2134 | loss: 3.813482 iter: 2135 | loss: 3.813180 iter: 2136 | loss: 3.812879 iter: 2137 | loss: 3.812577 iter: 2138 | loss: 3.812276 iter: 2139 | loss: 3.811974 iter: 2140 | loss: 3.811673 iter: 2141 | loss: 3.811371 iter: 2142 | loss: 3.811070 iter: 2143 | loss: 3.810768 iter: 2144 | loss: 3.810467 iter: 2145 | loss: 3.810165 iter: 2146 | loss: 3.809864 iter: 2147 | loss: 3.809562 iter: 2148 | loss: 3.809261 iter: 2149 | loss: 3.808959 iter: 2150 | loss: 3.808658 iter: 2151 | loss: 3.808356 iter: 2152 | loss: 3.808055 iter: 2153 | loss: 3.807753 iter: 2154 | loss: 3.807452 iter: 2155 | loss: 3.807150 iter: 2156 | loss: 3.806849 iter: 2157 | loss: 3.806547 iter: 2158 | loss: 3.806246 iter: 2159 | loss: 3.805944 iter: 2160 | loss: 3.805643 iter: 2161 | loss: 3.805341 iter: 2162 | loss: 3.805040 iter: 2163 | loss: 3.804738 iter: 2164 | loss: 3.804437 iter: 2165 | loss: 3.804135 iter: 2166 | loss: 3.803834 iter: 2167 | loss: 3.803532 iter: 2168 | loss: 3.803231 iter: 2169 | loss: 3.802929 iter: 2170 | loss: 3.802628 iter: 2171 | loss: 3.802326 iter: 2172 | loss: 3.802025 iter: 2173 | loss: 3.801723 iter: 2174 | loss: 3.801422 iter: 2175 | loss: 3.801120 iter: 2176 | loss: 3.800818 iter: 2177 | loss: 3.800517 iter: 2178 | loss: 3.800215 iter: 2179 | loss: 3.799914 iter: 2180 | loss: 3.799612 iter: 2181 | loss: 3.799311 iter: 2182 | loss: 3.799009 iter: 2183 | loss: 3.798708 iter: 2184 | loss: 3.798406 iter: 2185 | loss: 3.798105 iter: 2186 | loss: 3.797803 iter: 2187 | loss: 3.797502 iter: 2188 | loss: 3.797200 iter: 2189 | loss: 3.796899 iter: 2190 | loss: 
3.796597 iter: 2191 | loss: 3.796296 iter: 2192 | loss: 3.795994 iter: 2193 | loss: 3.795693 iter: 2194 | loss: 3.795391 iter: 2195 | loss: 3.795090 iter: 2196 | loss: 3.794788 iter: 2197 | loss: 3.794487 iter: 2198 | loss: 3.794185 iter: 2199 | loss: 3.793884 iter: 2200 | loss: 3.793582 iter: 2201 | loss: 3.793281 iter: 2202 | loss: 3.792979 iter: 2203 | loss: 3.792678 iter: 2204 | loss: 3.792376 iter: 2205 | loss: 3.792075 iter: 2206 | loss: 3.791773 iter: 2207 | loss: 3.791472 iter: 2208 | loss: 3.791170 iter: 2209 | loss: 3.790869 iter: 2210 | loss: 3.790567 iter: 2211 | loss: 3.790266 iter: 2212 | loss: 3.789964 iter: 2213 | loss: 3.789663 iter: 2214 | loss: 3.789361 iter: 2215 | loss: 3.789060 iter: 2216 | loss: 3.788758 iter: 2217 | loss: 3.788457 iter: 2218 | loss: 3.788155 iter: 2219 | loss: 3.787854 iter: 2220 | loss: 3.787552 iter: 2221 | loss: 3.787251 iter: 2222 | loss: 3.786949 iter: 2223 | loss: 3.786648 iter: 2224 | loss: 3.786346 iter: 2225 | loss: 3.786045 iter: 2226 | loss: 3.785743 iter: 2227 | loss: 3.785442 iter: 2228 | loss: 3.785140 iter: 2229 | loss: 3.784838 iter: 2230 | loss: 3.784537 iter: 2231 | loss: 3.784235 iter: 2232 | loss: 3.783934 iter: 2233 | loss: 3.783632 iter: 2234 | loss: 3.783331 iter: 2235 | loss: 3.783029 iter: 2236 | loss: 3.782728 iter: 2237 | loss: 3.782426 iter: 2238 | loss: 3.782125 iter: 2239 | loss: 3.781823 iter: 2240 | loss: 3.781522 iter: 2241 | loss: 3.781220 iter: 2242 | loss: 3.780919 iter: 2243 | loss: 3.780617 iter: 2244 | loss: 3.780316 iter: 2245 | loss: 3.780014 iter: 2246 | loss: 3.779713 iter: 2247 | loss: 3.779411 iter: 2248 | loss: 3.779110 iter: 2249 | loss: 3.778808 iter: 2250 | loss: 3.778507 iter: 2251 | loss: 3.778205 iter: 2252 | loss: 3.777904 iter: 2253 | loss: 3.777602 iter: 2254 | loss: 3.777301 iter: 2255 | loss: 3.776999 iter: 2256 | loss: 3.776698 iter: 2257 | loss: 3.776396 iter: 2258 | loss: 3.776095 iter: 2259 | loss: 3.775793 iter: 2260 | loss: 3.775492 iter: 2261 | loss: 3.775190 iter: 2262 | loss: 3.774889 iter: 2263 | loss: 3.774587 iter: 2264 | loss: 3.774286 iter: 2265 | loss: 3.773984 iter: 2266 | loss: 3.773683 iter: 2267 | loss: 3.773381 iter: 2268 | loss: 3.773080 iter: 2269 | loss: 3.772778 iter: 2270 | loss: 3.772477 iter: 2271 | loss: 3.772175 iter: 2272 | loss: 3.771874 iter: 2273 | loss: 3.771572 iter: 2274 | loss: 3.771271 iter: 2275 | loss: 3.770969 iter: 2276 | loss: 3.770668 iter: 2277 | loss: 3.770366 iter: 2278 | loss: 3.770065 iter: 2279 | loss: 3.769763 iter: 2280 | loss: 3.769461 iter: 2281 | loss: 3.769160 iter: 2282 | loss: 3.768858 iter: 2283 | loss: 3.768557 iter: 2284 | loss: 3.768255 iter: 2285 | loss: 3.767954 iter: 2286 | loss: 3.767652 iter: 2287 | loss: 3.767351 iter: 2288 | loss: 3.767049 iter: 2289 | loss: 3.766748 iter: 2290 | loss: 3.766446 iter: 2291 | loss: 3.766145 iter: 2292 | loss: 3.765843 iter: 2293 | loss: 3.765542 iter: 2294 | loss: 3.765240 iter: 2295 | loss: 3.764939 iter: 2296 | loss: 3.764637 iter: 2297 | loss: 3.764336 iter: 2298 | loss: 3.764034 iter: 2299 | loss: 3.763733 iter: 2300 | loss: 3.763431 iter: 2301 | loss: 3.763130 iter: 2302 | loss: 3.762828 iter: 2303 | loss: 3.762527 iter: 2304 | loss: 3.762225 iter: 2305 | loss: 3.761924 iter: 2306 | loss: 3.761622 iter: 2307 | loss: 3.761321 iter: 2308 | loss: 3.761019 iter: 2309 | loss: 3.760718 iter: 2310 | loss: 3.760416 iter: 2311 | loss: 3.760115 iter: 2312 | loss: 3.759813 iter: 2313 | loss: 3.759512 iter: 2314 | loss: 3.759210 iter: 2315 | loss: 3.758909 iter: 2316 | loss: 3.758607 iter: 2317 | 
loss: 3.758306 iter: 2318 | loss: 3.758004 iter: 2319 | loss: 3.757703 iter: 2320 | loss: 3.757401 iter: 2321 | loss: 3.757100 iter: 2322 | loss: 3.756798 iter: 2323 | loss: 3.756497 iter: 2324 | loss: 3.756195 iter: 2325 | loss: 3.755894 iter: 2326 | loss: 3.755592 iter: 2327 | loss: 3.755291 iter: 2328 | loss: 3.754989 iter: 2329 | loss: 3.754688 iter: 2330 | loss: 3.754386 iter: 2331 | loss: 3.754085 iter: 2332 | loss: 3.753783 iter: 2333 | loss: 3.753481 iter: 2334 | loss: 3.753180 iter: 2335 | loss: 3.752878 iter: 2336 | loss: 3.752577 iter: 2337 | loss: 3.752275 iter: 2338 | loss: 3.751974 iter: 2339 | loss: 3.751672 iter: 2340 | loss: 3.751371 iter: 2341 | loss: 3.751069 iter: 2342 | loss: 3.750768 iter: 2343 | loss: 3.750466 iter: 2344 | loss: 3.750165 iter: 2345 | loss: 3.749863 iter: 2346 | loss: 3.749562 iter: 2347 | loss: 3.749260 iter: 2348 | loss: 3.748959 iter: 2349 | loss: 3.748657 iter: 2350 | loss: 3.748356 iter: 2351 | loss: 3.748054 iter: 2352 | loss: 3.747753 iter: 2353 | loss: 3.747451 iter: 2354 | loss: 3.747150 iter: 2355 | loss: 3.746848 iter: 2356 | loss: 3.746547 iter: 2357 | loss: 3.746245 iter: 2358 | loss: 3.745944 iter: 2359 | loss: 3.745642 iter: 2360 | loss: 3.745341 iter: 2361 | loss: 3.745039 iter: 2362 | loss: 3.744738 iter: 2363 | loss: 3.744436 iter: 2364 | loss: 3.744135 iter: 2365 | loss: 3.743833 iter: 2366 | loss: 3.743532 iter: 2367 | loss: 3.743230 iter: 2368 | loss: 3.742929 iter: 2369 | loss: 3.742627 iter: 2370 | loss: 3.742326 iter: 2371 | loss: 3.742024 iter: 2372 | loss: 3.741723 iter: 2373 | loss: 3.741421 iter: 2374 | loss: 3.741120 iter: 2375 | loss: 3.740818 iter: 2376 | loss: 3.740517 iter: 2377 | loss: 3.740215 iter: 2378 | loss: 3.739914 iter: 2379 | loss: 3.739612 iter: 2380 | loss: 3.739311 iter: 2381 | loss: 3.739009 iter: 2382 | loss: 3.738708 iter: 2383 | loss: 3.738406 iter: 2384 | loss: 3.738104 iter: 2385 | loss: 3.737803 iter: 2386 | loss: 3.737501 iter: 2387 | loss: 3.737200 iter: 2388 | loss: 3.736898 iter: 2389 | loss: 3.736597 iter: 2390 | loss: 3.736295 iter: 2391 | loss: 3.735994 iter: 2392 | loss: 3.735692 iter: 2393 | loss: 3.735391 iter: 2394 | loss: 3.735089 iter: 2395 | loss: 3.734788 iter: 2396 | loss: 3.734486 iter: 2397 | loss: 3.734185 iter: 2398 | loss: 3.733883 iter: 2399 | loss: 3.733582 iter: 2400 | loss: 3.733280 iter: 2401 | loss: 3.732979 iter: 2402 | loss: 3.732677 iter: 2403 | loss: 3.732376 iter: 2404 | loss: 3.732074 iter: 2405 | loss: 3.731773 iter: 2406 | loss: 3.731471 iter: 2407 | loss: 3.731170 iter: 2408 | loss: 3.730868 iter: 2409 | loss: 3.730567 iter: 2410 | loss: 3.730265 iter: 2411 | loss: 3.729964 iter: 2412 | loss: 3.729662 iter: 2413 | loss: 3.729361 iter: 2414 | loss: 3.729059 iter: 2415 | loss: 3.728758 iter: 2416 | loss: 3.728456 iter: 2417 | loss: 3.728155 iter: 2418 | loss: 3.727853 iter: 2419 | loss: 3.727552 iter: 2420 | loss: 3.727250 iter: 2421 | loss: 3.726949 iter: 2422 | loss: 3.726647 iter: 2423 | loss: 3.726346 iter: 2424 | loss: 3.726044 iter: 2425 | loss: 3.725743 iter: 2426 | loss: 3.725441 iter: 2427 | loss: 3.725140 iter: 2428 | loss: 3.724838 iter: 2429 | loss: 3.724537 iter: 2430 | loss: 3.724235 iter: 2431 | loss: 3.723934 iter: 2432 | loss: 3.723632 iter: 2433 | loss: 3.723331 iter: 2434 | loss: 3.723029 iter: 2435 | loss: 3.722728 iter: 2436 | loss: 3.722426 iter: 2437 | loss: 3.722124 iter: 2438 | loss: 3.721823 iter: 2439 | loss: 3.721521 iter: 2440 | loss: 3.721220 iter: 2441 | loss: 3.720918 iter: 2442 | loss: 3.720617 iter: 2443 | loss: 3.720315 iter: 2444 
| loss: 3.720014 iter: 2445 | loss: 3.719712 iter: 2446 | loss: 3.719411 iter: 2447 | loss: 3.719109 iter: 2448 | loss: 3.718808 iter: 2449 | loss: 3.718506 iter: 2450 | loss: 3.718205 iter: 2451 | loss: 3.717903 iter: 2452 | loss: 3.717602 iter: 2453 | loss: 3.717300 iter: 2454 | loss: 3.716999 iter: 2455 | loss: 3.716697 iter: 2456 | loss: 3.716396 iter: 2457 | loss: 3.716094 iter: 2458 | loss: 3.715793 iter: 2459 | loss: 3.715491 iter: 2460 | loss: 3.715190 iter: 2461 | loss: 3.714888 iter: 2462 | loss: 3.714587 iter: 2463 | loss: 3.714285 iter: 2464 | loss: 3.713984 iter: 2465 | loss: 3.713682 iter: 2466 | loss: 3.713381 iter: 2467 | loss: 3.713079 iter: 2468 | loss: 3.712778 iter: 2469 | loss: 3.712476 iter: 2470 | loss: 3.712175 iter: 2471 | loss: 3.711873 iter: 2472 | loss: 3.711572 iter: 2473 | loss: 3.711270 iter: 2474 | loss: 3.710969 iter: 2475 | loss: 3.710667 iter: 2476 | loss: 3.710366 iter: 2477 | loss: 3.710064 iter: 2478 | loss: 3.709763 iter: 2479 | loss: 3.709461 iter: 2480 | loss: 3.709160 iter: 2481 | loss: 3.708858 iter: 2482 | loss: 3.708557 iter: 2483 | loss: 3.708255 iter: 2484 | loss: 3.707954 iter: 2485 | loss: 3.707652 iter: 2486 | loss: 3.707351 iter: 2487 | loss: 3.707049 iter: 2488 | loss: 3.706747 iter: 2489 | loss: 3.706446 iter: 2490 | loss: 3.706144 iter: 2491 | loss: 3.705843 iter: 2492 | loss: 3.705541 iter: 2493 | loss: 3.705240 iter: 2494 | loss: 3.704938 iter: 2495 | loss: 3.704637 iter: 2496 | loss: 3.704335 iter: 2497 | loss: 3.704034 iter: 2498 | loss: 3.703732 iter: 2499 | loss: 3.703431 iter: 2500 | loss: 3.703129 iter: 2501 | loss: 3.702828 iter: 2502 | loss: 3.702526 iter: 2503 | loss: 3.702225 iter: 2504 | loss: 3.701923 iter: 2505 | loss: 3.701622 iter: 2506 | loss: 3.701320 iter: 2507 | loss: 3.701019 iter: 2508 | loss: 3.700717 iter: 2509 | loss: 3.700416 iter: 2510 | loss: 3.700114 iter: 2511 | loss: 3.699813 iter: 2512 | loss: 3.699511 iter: 2513 | loss: 3.699210 iter: 2514 | loss: 3.698908 iter: 2515 | loss: 3.698607 iter: 2516 | loss: 3.698305 iter: 2517 | loss: 3.698004 iter: 2518 | loss: 3.697702 iter: 2519 | loss: 3.697401 iter: 2520 | loss: 3.697099 iter: 2521 | loss: 3.696798 iter: 2522 | loss: 3.696496 iter: 2523 | loss: 3.696195 iter: 2524 | loss: 3.695893 iter: 2525 | loss: 3.695592 iter: 2526 | loss: 3.695290 iter: 2527 | loss: 3.694989 iter: 2528 | loss: 3.694687 iter: 2529 | loss: 3.694386 iter: 2530 | loss: 3.694084 iter: 2531 | loss: 3.693783 iter: 2532 | loss: 3.693481 iter: 2533 | loss: 3.693180 iter: 2534 | loss: 3.692878 iter: 2535 | loss: 3.692577 iter: 2536 | loss: 3.692275 iter: 2537 | loss: 3.691974 iter: 2538 | loss: 3.691672 iter: 2539 | loss: 3.691371 iter: 2540 | loss: 3.691069 iter: 2541 | loss: 3.690767 iter: 2542 | loss: 3.690466 iter: 2543 | loss: 3.690164 iter: 2544 | loss: 3.689863 iter: 2545 | loss: 3.689561 iter: 2546 | loss: 3.689260 iter: 2547 | loss: 3.688958 iter: 2548 | loss: 3.688657 iter: 2549 | loss: 3.688355 iter: 2550 | loss: 3.688054 iter: 2551 | loss: 3.687752 iter: 2552 | loss: 3.687451 iter: 2553 | loss: 3.687149 iter: 2554 | loss: 3.686848 iter: 2555 | loss: 3.686546 iter: 2556 | loss: 3.686245 iter: 2557 | loss: 3.685943 iter: 2558 | loss: 3.685642 iter: 2559 | loss: 3.685340 iter: 2560 | loss: 3.685039 iter: 2561 | loss: 3.684737 iter: 2562 | loss: 3.684436 iter: 2563 | loss: 3.684134 iter: 2564 | loss: 3.683833 iter: 2565 | loss: 3.683531 iter: 2566 | loss: 3.683230 iter: 2567 | loss: 3.682928 iter: 2568 | loss: 3.682627 iter: 2569 | loss: 3.682325 iter: 2570 | loss: 3.682024 iter: 
2571 | loss: 3.681722 iter: 2572 | loss: 3.681421 iter: 2573 | loss: 3.681119 iter: 2574 | loss: 3.680818 iter: 2575 | loss: 3.680516 iter: 2576 | loss: 3.680215 iter: 2577 | loss: 3.679913 iter: 2578 | loss: 3.679612 iter: 2579 | loss: 3.679310 iter: 2580 | loss: 3.679009 iter: 2581 | loss: 3.678707 iter: 2582 | loss: 3.678406 iter: 2583 | loss: 3.678104 iter: 2584 | loss: 3.677803 iter: 2585 | loss: 3.677501 iter: 2586 | loss: 3.677200 iter: 2587 | loss: 3.676898 iter: 2588 | loss: 3.676597 iter: 2589 | loss: 3.676295 iter: 2590 | loss: 3.675994 iter: 2591 | loss: 3.675692 iter: 2592 | loss: 3.675391 iter: 2593 | loss: 3.675089 iter: 2594 | loss: 3.674787 iter: 2595 | loss: 3.674486 iter: 2596 | loss: 3.674184 iter: 2597 | loss: 3.673883 iter: 2598 | loss: 3.673581 iter: 2599 | loss: 3.673280 iter: 2600 | loss: 3.672978 iter: 2601 | loss: 3.672677 iter: 2602 | loss: 3.672375 iter: 2603 | loss: 3.672074 iter: 2604 | loss: 3.671772 iter: 2605 | loss: 3.671471 iter: 2606 | loss: 3.671169 iter: 2607 | loss: 3.670868 iter: 2608 | loss: 3.670566 iter: 2609 | loss: 3.670265 iter: 2610 | loss: 3.669963 iter: 2611 | loss: 3.669662 iter: 2612 | loss: 3.669360 iter: 2613 | loss: 3.669059 iter: 2614 | loss: 3.668757 iter: 2615 | loss: 3.668456 iter: 2616 | loss: 3.668154 iter: 2617 | loss: 3.667853 iter: 2618 | loss: 3.667551 iter: 2619 | loss: 3.667250 iter: 2620 | loss: 3.666948 iter: 2621 | loss: 3.666647 iter: 2622 | loss: 3.666345 iter: 2623 | loss: 3.666044 iter: 2624 | loss: 3.665742 iter: 2625 | loss: 3.665441 iter: 2626 | loss: 3.665139 iter: 2627 | loss: 3.664838 iter: 2628 | loss: 3.664536 iter: 2629 | loss: 3.664235 iter: 2630 | loss: 3.663933 iter: 2631 | loss: 3.663632 iter: 2632 | loss: 3.663330 iter: 2633 | loss: 3.663029 iter: 2634 | loss: 3.662727 iter: 2635 | loss: 3.662426 iter: 2636 | loss: 3.662124 iter: 2637 | loss: 3.661823 iter: 2638 | loss: 3.661521 iter: 2639 | loss: 3.661220 iter: 2640 | loss: 3.660918 iter: 2641 | loss: 3.660617 iter: 2642 | loss: 3.660315 iter: 2643 | loss: 3.660014 iter: 2644 | loss: 3.659712 iter: 2645 | loss: 3.659410 iter: 2646 | loss: 3.659109 iter: 2647 | loss: 3.658807 iter: 2648 | loss: 3.658506 iter: 2649 | loss: 3.658204 iter: 2650 | loss: 3.657903 iter: 2651 | loss: 3.657601 iter: 2652 | loss: 3.657300 iter: 2653 | loss: 3.656998 iter: 2654 | loss: 3.656697 iter: 2655 | loss: 3.656395 iter: 2656 | loss: 3.656094 iter: 2657 | loss: 3.655792 iter: 2658 | loss: 3.655491 iter: 2659 | loss: 3.655189 iter: 2660 | loss: 3.654888 iter: 2661 | loss: 3.654586 iter: 2662 | loss: 3.654285 iter: 2663 | loss: 3.653983 iter: 2664 | loss: 3.653682 iter: 2665 | loss: 3.653380 iter: 2666 | loss: 3.653079 iter: 2667 | loss: 3.652777 iter: 2668 | loss: 3.652476 iter: 2669 | loss: 3.652174 iter: 2670 | loss: 3.651873 iter: 2671 | loss: 3.651571 iter: 2672 | loss: 3.651270 iter: 2673 | loss: 3.650968 iter: 2674 | loss: 3.650667 iter: 2675 | loss: 3.650365 iter: 2676 | loss: 3.650064 iter: 2677 | loss: 3.649762 iter: 2678 | loss: 3.649461 iter: 2679 | loss: 3.649159 iter: 2680 | loss: 3.648858 iter: 2681 | loss: 3.648556 iter: 2682 | loss: 3.648255 iter: 2683 | loss: 3.647953 iter: 2684 | loss: 3.647652 iter: 2685 | loss: 3.647350 iter: 2686 | loss: 3.647049 iter: 2687 | loss: 3.646747 iter: 2688 | loss: 3.646446 iter: 2689 | loss: 3.646144 iter: 2690 | loss: 3.645843 iter: 2691 | loss: 3.645541 iter: 2692 | loss: 3.645240 iter: 2693 | loss: 3.644938 iter: 2694 | loss: 3.644637 iter: 2695 | loss: 3.644335 iter: 2696 | loss: 3.644034 iter: 2697 | loss: 3.643732 
iter: 2698 | loss: 3.643430 iter: 2699 | loss: 3.643129 iter: 2700 | loss: 3.642827 iter: 2701 | loss: 3.642526 iter: 2702 | loss: 3.642224 iter: 2703 | loss: 3.641923 iter: 2704 | loss: 3.641621 iter: 2705 | loss: 3.641320 iter: 2706 | loss: 3.641018 iter: 2707 | loss: 3.640717 iter: 2708 | loss: 3.640415 iter: 2709 | loss: 3.640114 iter: 2710 | loss: 3.639812 iter: 2711 | loss: 3.639511 iter: 2712 | loss: 3.639209 iter: 2713 | loss: 3.638908 iter: 2714 | loss: 3.638606 iter: 2715 | loss: 3.638305 iter: 2716 | loss: 3.638003 iter: 2717 | loss: 3.637702 iter: 2718 | loss: 3.637400 iter: 2719 | loss: 3.637099 iter: 2720 | loss: 3.636797 iter: 2721 | loss: 3.636496 iter: 2722 | loss: 3.636194 iter: 2723 | loss: 3.635893 iter: 2724 | loss: 3.635591 iter: 2725 | loss: 3.635290 iter: 2726 | loss: 3.634988 iter: 2727 | loss: 3.634687 iter: 2728 | loss: 3.634385 iter: 2729 | loss: 3.634084 iter: 2730 | loss: 3.633782 iter: 2731 | loss: 3.633481 iter: 2732 | loss: 3.633179 iter: 2733 | loss: 3.632878 iter: 2734 | loss: 3.632576 iter: 2735 | loss: 3.632275 iter: 2736 | loss: 3.631973 iter: 2737 | loss: 3.631672 iter: 2738 | loss: 3.631370 iter: 2739 | loss: 3.631069 iter: 2740 | loss: 3.630767 iter: 2741 | loss: 3.630466 iter: 2742 | loss: 3.630164 iter: 2743 | loss: 3.629863 iter: 2744 | loss: 3.629561 iter: 2745 | loss: 3.629260 iter: 2746 | loss: 3.628958 iter: 2747 | loss: 3.628657 iter: 2748 | loss: 3.628355 iter: 2749 | loss: 3.628053 iter: 2750 | loss: 3.627752 iter: 2751 | loss: 3.627450 iter: 2752 | loss: 3.627149 iter: 2753 | loss: 3.626847 iter: 2754 | loss: 3.626546 iter: 2755 | loss: 3.626244 iter: 2756 | loss: 3.625943 iter: 2757 | loss: 3.625641 iter: 2758 | loss: 3.625340 iter: 2759 | loss: 3.625038 iter: 2760 | loss: 3.624737 iter: 2761 | loss: 3.624435 iter: 2762 | loss: 3.624134 iter: 2763 | loss: 3.623832 iter: 2764 | loss: 3.623531 iter: 2765 | loss: 3.623229 iter: 2766 | loss: 3.622928 iter: 2767 | loss: 3.622626 iter: 2768 | loss: 3.622325 iter: 2769 | loss: 3.622023 iter: 2770 | loss: 3.621722 iter: 2771 | loss: 3.621420 iter: 2772 | loss: 3.621119 iter: 2773 | loss: 3.620817 iter: 2774 | loss: 3.620516 iter: 2775 | loss: 3.620214 iter: 2776 | loss: 3.619913 iter: 2777 | loss: 3.619611 iter: 2778 | loss: 3.619310 iter: 2779 | loss: 3.619008 iter: 2780 | loss: 3.618707 iter: 2781 | loss: 3.618405 iter: 2782 | loss: 3.618104 iter: 2783 | loss: 3.617802 iter: 2784 | loss: 3.617501 iter: 2785 | loss: 3.617199 iter: 2786 | loss: 3.616898 iter: 2787 | loss: 3.616596 iter: 2788 | loss: 3.616295 iter: 2789 | loss: 3.615993 iter: 2790 | loss: 3.615692 iter: 2791 | loss: 3.615390 iter: 2792 | loss: 3.615089 iter: 2793 | loss: 3.614787 iter: 2794 | loss: 3.614486 iter: 2795 | loss: 3.614184 iter: 2796 | loss: 3.613883 iter: 2797 | loss: 3.613581 iter: 2798 | loss: 3.613280 iter: 2799 | loss: 3.612978 iter: 2800 | loss: 3.612677 iter: 2801 | loss: 3.612375 iter: 2802 | loss: 3.612073 iter: 2803 | loss: 3.611772 iter: 2804 | loss: 3.611470 iter: 2805 | loss: 3.611169 iter: 2806 | loss: 3.610867 iter: 2807 | loss: 3.610566 iter: 2808 | loss: 3.610264 iter: 2809 | loss: 3.609963 iter: 2810 | loss: 3.609661 iter: 2811 | loss: 3.609360 iter: 2812 | loss: 3.609058 iter: 2813 | loss: 3.608757 iter: 2814 | loss: 3.608455 iter: 2815 | loss: 3.608154 iter: 2816 | loss: 3.607852 iter: 2817 | loss: 3.607551 iter: 2818 | loss: 3.607249 iter: 2819 | loss: 3.606948 iter: 2820 | loss: 3.606646 iter: 2821 | loss: 3.606345 iter: 2822 | loss: 3.606043 iter: 2823 | loss: 3.605742 iter: 2824 | loss: 
3.605440 iter: 2825 | loss: 3.605139 iter: 2826 | loss: 3.604837 iter: 2827 | loss: 3.604536 iter: 2828 | loss: 3.604234 iter: 2829 | loss: 3.603933 iter: 2830 | loss: 3.603631 iter: 2831 | loss: 3.603330 iter: 2832 | loss: 3.603028 iter: 2833 | loss: 3.602727 iter: 2834 | loss: 3.602425 iter: 2835 | loss: 3.602124 iter: 2836 | loss: 3.601822 iter: 2837 | loss: 3.601521 iter: 2838 | loss: 3.601219 iter: 2839 | loss: 3.600918 iter: 2840 | loss: 3.600616 iter: 2841 | loss: 3.600315 iter: 2842 | loss: 3.600013 iter: 2843 | loss: 3.599712 iter: 2844 | loss: 3.599410 iter: 2845 | loss: 3.599109 iter: 2846 | loss: 3.598807 iter: 2847 | loss: 3.598506 iter: 2848 | loss: 3.598204 iter: 2849 | loss: 3.597903 iter: 2850 | loss: 3.597601 iter: 2851 | loss: 3.597300 iter: 2852 | loss: 3.596998 iter: 2853 | loss: 3.596696 iter: 2854 | loss: 3.596395 iter: 2855 | loss: 3.596093 iter: 2856 | loss: 3.595792 iter: 2857 | loss: 3.595490 iter: 2858 | loss: 3.595189 iter: 2859 | loss: 3.594887 iter: 2860 | loss: 3.594586 iter: 2861 | loss: 3.594284 iter: 2862 | loss: 3.593983 iter: 2863 | loss: 3.593681 iter: 2864 | loss: 3.593380 iter: 2865 | loss: 3.593078 iter: 2866 | loss: 3.592777 iter: 2867 | loss: 3.592475 iter: 2868 | loss: 3.592174 iter: 2869 | loss: 3.591872 iter: 2870 | loss: 3.591571 iter: 2871 | loss: 3.591269 iter: 2872 | loss: 3.590968 iter: 2873 | loss: 3.590666 iter: 2874 | loss: 3.590365 iter: 2875 | loss: 3.590063 iter: 2876 | loss: 3.589762 iter: 2877 | loss: 3.589460 iter: 2878 | loss: 3.589159 iter: 2879 | loss: 3.588857 iter: 2880 | loss: 3.588556 iter: 2881 | loss: 3.588254 iter: 2882 | loss: 3.587953 iter: 2883 | loss: 3.587651 iter: 2884 | loss: 3.587350 iter: 2885 | loss: 3.587048 iter: 2886 | loss: 3.586747 iter: 2887 | loss: 3.586445 iter: 2888 | loss: 3.586144 iter: 2889 | loss: 3.585842 iter: 2890 | loss: 3.585541 iter: 2891 | loss: 3.585239 iter: 2892 | loss: 3.584938 iter: 2893 | loss: 3.584636 iter: 2894 | loss: 3.584335 iter: 2895 | loss: 3.584033 iter: 2896 | loss: 3.583732 iter: 2897 | loss: 3.583430 iter: 2898 | loss: 3.583129 iter: 2899 | loss: 3.582827 iter: 2900 | loss: 3.582526 iter: 2901 | loss: 3.582224 iter: 2902 | loss: 3.581923 iter: 2903 | loss: 3.581621 iter: 2904 | loss: 3.581320 iter: 2905 | loss: 3.581018 iter: 2906 | loss: 3.580716 iter: 2907 | loss: 3.580415 iter: 2908 | loss: 3.580113 iter: 2909 | loss: 3.579812 iter: 2910 | loss: 3.579510 iter: 2911 | loss: 3.579209 iter: 2912 | loss: 3.578907 iter: 2913 | loss: 3.578606 iter: 2914 | loss: 3.578304 iter: 2915 | loss: 3.578003 iter: 2916 | loss: 3.577701 iter: 2917 | loss: 3.577400 iter: 2918 | loss: 3.577098 iter: 2919 | loss: 3.576797 iter: 2920 | loss: 3.576495 iter: 2921 | loss: 3.576194 iter: 2922 | loss: 3.575892 iter: 2923 | loss: 3.575591 iter: 2924 | loss: 3.575289 iter: 2925 | loss: 3.574988 iter: 2926 | loss: 3.574686 iter: 2927 | loss: 3.574385 iter: 2928 | loss: 3.574083 iter: 2929 | loss: 3.573782 iter: 2930 | loss: 3.573480 iter: 2931 | loss: 3.573179 iter: 2932 | loss: 3.572877 iter: 2933 | loss: 3.572576 iter: 2934 | loss: 3.572274 iter: 2935 | loss: 3.571973 iter: 2936 | loss: 3.571671 iter: 2937 | loss: 3.571370 iter: 2938 | loss: 3.571068 iter: 2939 | loss: 3.570767 iter: 2940 | loss: 3.570465 iter: 2941 | loss: 3.570164 iter: 2942 | loss: 3.569862 iter: 2943 | loss: 3.569561 iter: 2944 | loss: 3.569259 iter: 2945 | loss: 3.568958 iter: 2946 | loss: 3.568656 iter: 2947 | loss: 3.568355 iter: 2948 | loss: 3.568053 iter: 2949 | loss: 3.567752 iter: 2950 | loss: 3.567450 iter: 2951 | 
loss: 3.567149 iter: 2952 | loss: 3.566847 iter: 2953 | loss: 3.566546 iter: 2954 | loss: 3.566244 iter: 2955 | loss: 3.565943 iter: 2956 | loss: 3.565641 iter: 2957 | loss: 3.565340 iter: 2958 | loss: 3.565038 iter: 2959 | loss: 3.564736 iter: 2960 | loss: 3.564435 iter: 2961 | loss: 3.564133 iter: 2962 | loss: 3.563832 iter: 2963 | loss: 3.563530 iter: 2964 | loss: 3.563229 iter: 2965 | loss: 3.562927 iter: 2966 | loss: 3.562626 iter: 2967 | loss: 3.562324 iter: 2968 | loss: 3.562023 iter: 2969 | loss: 3.561721 iter: 2970 | loss: 3.561420 iter: 2971 | loss: 3.561118 iter: 2972 | loss: 3.560817 iter: 2973 | loss: 3.560515 iter: 2974 | loss: 3.560214 iter: 2975 | loss: 3.559912 iter: 2976 | loss: 3.559611 iter: 2977 | loss: 3.559309 iter: 2978 | loss: 3.559008 iter: 2979 | loss: 3.558706 iter: 2980 | loss: 3.558405 iter: 2981 | loss: 3.558103 iter: 2982 | loss: 3.557802 iter: 2983 | loss: 3.557500 iter: 2984 | loss: 3.557199 iter: 2985 | loss: 3.556897 iter: 2986 | loss: 3.556596 iter: 2987 | loss: 3.556294 iter: 2988 | loss: 3.555993 iter: 2989 | loss: 3.555691 iter: 2990 | loss: 3.555390 iter: 2991 | loss: 3.555088 iter: 2992 | loss: 3.554787 iter: 2993 | loss: 3.554485 iter: 2994 | loss: 3.554184 iter: 2995 | loss: 3.553882 iter: 2996 | loss: 3.553581 iter: 2997 | loss: 3.553279 iter: 2998 | loss: 3.552978 iter: 2999 | loss: 3.552676 iter: 3000 | loss: 3.552375 iter: 3001 | loss: 3.552073 iter: 3002 | loss: 3.551772 iter: 3003 | loss: 3.551470 iter: 3004 | loss: 3.551169 iter: 3005 | loss: 3.550867 iter: 3006 | loss: 3.550566 iter: 3007 | loss: 3.550264 iter: 3008 | loss: 3.549963 iter: 3009 | loss: 3.549661 iter: 3010 | loss: 3.549359 iter: 3011 | loss: 3.549058 iter: 3012 | loss: 3.548756 iter: 3013 | loss: 3.548455 iter: 3014 | loss: 3.548153 iter: 3015 | loss: 3.547852 iter: 3016 | loss: 3.547550 iter: 3017 | loss: 3.547249 iter: 3018 | loss: 3.546947 iter: 3019 | loss: 3.546646 iter: 3020 | loss: 3.546344 iter: 3021 | loss: 3.546043 iter: 3022 | loss: 3.545741 iter: 3023 | loss: 3.545440 iter: 3024 | loss: 3.545138 iter: 3025 | loss: 3.544837 iter: 3026 | loss: 3.544535 iter: 3027 | loss: 3.544234 iter: 3028 | loss: 3.543932 iter: 3029 | loss: 3.543631 iter: 3030 | loss: 3.543329 iter: 3031 | loss: 3.543028 iter: 3032 | loss: 3.542726 iter: 3033 | loss: 3.542425 iter: 3034 | loss: 3.542123 iter: 3035 | loss: 3.541822 iter: 3036 | loss: 3.541520 iter: 3037 | loss: 3.541219 iter: 3038 | loss: 3.540917 iter: 3039 | loss: 3.540616 iter: 3040 | loss: 3.540314 iter: 3041 | loss: 3.540013 iter: 3042 | loss: 3.539711 iter: 3043 | loss: 3.539410 iter: 3044 | loss: 3.539108 iter: 3045 | loss: 3.538807 iter: 3046 | loss: 3.538505 iter: 3047 | loss: 3.538204 iter: 3048 | loss: 3.537902 iter: 3049 | loss: 3.537601 iter: 3050 | loss: 3.537299 iter: 3051 | loss: 3.536998 iter: 3052 | loss: 3.536696 iter: 3053 | loss: 3.536395 iter: 3054 | loss: 3.536093 iter: 3055 | loss: 3.535792 iter: 3056 | loss: 3.535490 iter: 3057 | loss: 3.535189 iter: 3058 | loss: 3.534887 iter: 3059 | loss: 3.534586 iter: 3060 | loss: 3.534284 iter: 3061 | loss: 3.533983 iter: 3062 | loss: 3.533681 iter: 3063 | loss: 3.533379 iter: 3064 | loss: 3.533078 iter: 3065 | loss: 3.532776 iter: 3066 | loss: 3.532475 iter: 3067 | loss: 3.532173 iter: 3068 | loss: 3.531872 iter: 3069 | loss: 3.531570 iter: 3070 | loss: 3.531269 iter: 3071 | loss: 3.530967 iter: 3072 | loss: 3.530666 iter: 3073 | loss: 3.530364 iter: 3074 | loss: 3.530063 iter: 3075 | loss: 3.529761 iter: 3076 | loss: 3.529460 iter: 3077 | loss: 3.529158 iter: 3078 
| loss: 3.528857 iter: 3079 | loss: 3.528555 iter: 3080 | loss: 3.528254 [... per-iteration output elided: loss decreases steadily ...] iter: 3608 | loss: 3.369057 iter: 3609 | loss: 3.368755 iter
- ...TRUNCATED BY DUNE...
- (cd _build/default/examples/opt && ./single.exe)
- step: 0 | loss: 7.184738733 step: 10 | loss: 7.179276760 step: 20 | loss: 7.173209253 [... output elided: loss reported every 10 steps, decreasing steadily ...] step: 14920 | loss: 0.006652836 step: 14930 | loss: 0.003173990
- final loss: 0.000748
- (cd _build/default/examples/opt && ./pair.exe)
- step: 0 | loss: 4.540126293 step: 10 | loss: 4.533550418 step: 20 | loss: 4.526247664 [... output elided: loss reported every 10 steps, decreasing steadily ...] step: 4150 | loss: 1.929601908 step: 4160 | loss: 1.924337518 step:
4170 | loss: 1.919077734 step: 4180 | loss: 1.913822537 step: 4190 | loss: 1.908571909 step: 4200 | loss: 1.903325829 step: 4210 | loss: 1.898084279 step: 4220 | loss: 1.892847239 step: 4230 | loss: 1.887614689 step: 4240 | loss: 1.882386609 step: 4250 | loss: 1.877162980 step: 4260 | loss: 1.871943782 step: 4270 | loss: 1.866728993 step: 4280 | loss: 1.861518594 step: 4290 | loss: 1.856312564 step: 4300 | loss: 1.851110882 step: 4310 | loss: 1.845913528 step: 4320 | loss: 1.840720480 step: 4330 | loss: 1.835531719 step: 4340 | loss: 1.830347222 step: 4350 | loss: 1.825166969 step: 4360 | loss: 1.819990938 step: 4370 | loss: 1.814819108 step: 4380 | loss: 1.809651459 step: 4390 | loss: 1.804487967 step: 4400 | loss: 1.799328613 step: 4410 | loss: 1.794173374 step: 4420 | loss: 1.789022229 step: 4430 | loss: 1.783875157 step: 4440 | loss: 1.778732135 step: 4450 | loss: 1.773593143 step: 4460 | loss: 1.768458159 step: 4470 | loss: 1.763327161 step: 4480 | loss: 1.758200128 step: 4490 | loss: 1.753077039 step: 4500 | loss: 1.747957871 step: 4510 | loss: 1.742842604 step: 4520 | loss: 1.737731215 step: 4530 | loss: 1.732623685 step: 4540 | loss: 1.727519992 step: 4550 | loss: 1.722420113 step: 4560 | loss: 1.717324030 step: 4570 | loss: 1.712231719 step: 4580 | loss: 1.707143161 step: 4590 | loss: 1.702058335 step: 4600 | loss: 1.696977220 step: 4610 | loss: 1.691899796 step: 4620 | loss: 1.686826042 step: 4630 | loss: 1.681755938 step: 4640 | loss: 1.676689464 step: 4650 | loss: 1.671626600 step: 4660 | loss: 1.666567326 step: 4670 | loss: 1.661511623 step: 4680 | loss: 1.656459472 step: 4690 | loss: 1.651410852 step: 4700 | loss: 1.646365746 step: 4710 | loss: 1.641324134 step: 4720 | loss: 1.636285997 step: 4730 | loss: 1.631251319 step: 4740 | loss: 1.626220079 step: 4750 | loss: 1.621192261 step: 4760 | loss: 1.616167847 step: 4770 | loss: 1.611146820 step: 4780 | loss: 1.606129162 step: 4790 | loss: 1.601114856 step: 4800 | loss: 1.596103887 step: 4810 | loss: 1.591096238 step: 4820 | loss: 1.586091892 step: 4830 | loss: 1.581090835 step: 4840 | loss: 1.576093051 step: 4850 | loss: 1.571098524 step: 4860 | loss: 1.566107241 step: 4870 | loss: 1.561119187 step: 4880 | loss: 1.556134347 step: 4890 | loss: 1.551152708 step: 4900 | loss: 1.546174257 step: 4910 | loss: 1.541198980 step: 4920 | loss: 1.536226866 step: 4930 | loss: 1.531257901 step: 4940 | loss: 1.526292074 step: 4950 | loss: 1.521329372 step: 4960 | loss: 1.516369786 step: 4970 | loss: 1.511413304 step: 4980 | loss: 1.506459916 step: 4990 | loss: 1.501509611 step: 5000 | loss: 1.496562380 step: 5010 | loss: 1.491618215 step: 5020 | loss: 1.486677105 step: 5030 | loss: 1.481739042 step: 5040 | loss: 1.476804019 step: 5050 | loss: 1.471872028 step: 5060 | loss: 1.466943062 step: 5070 | loss: 1.462017113 step: 5080 | loss: 1.457094176 step: 5090 | loss: 1.452174245 step: 5100 | loss: 1.447257313 step: 5110 | loss: 1.442343377 step: 5120 | loss: 1.437432431 step: 5130 | loss: 1.432524471 step: 5140 | loss: 1.427619493 step: 5150 | loss: 1.422717494 step: 5160 | loss: 1.417818471 step: 5170 | loss: 1.412922422 step: 5180 | loss: 1.408029344 step: 5190 | loss: 1.403139236 step: 5200 | loss: 1.398252096 step: 5210 | loss: 1.393367924 step: 5220 | loss: 1.388486720 step: 5230 | loss: 1.383608484 step: 5240 | loss: 1.378733215 step: 5250 | loss: 1.373860916 step: 5260 | loss: 1.368991588 step: 5270 | loss: 1.364125232 step: 5280 | loss: 1.359261850 step: 5290 | loss: 1.354401447 step: 5300 | loss: 1.349544023 step: 5310 | loss: 
1.344689584 step: 5320 | loss: 1.339838134 step: 5330 | loss: 1.334989675 step: 5340 | loss: 1.330144215 step: 5350 | loss: 1.325301757 step: 5360 | loss: 1.320462307 step: 5370 | loss: 1.315625872 step: 5380 | loss: 1.310792458 step: 5390 | loss: 1.305962071 step: 5400 | loss: 1.301134720 step: 5410 | loss: 1.296310412 step: 5420 | loss: 1.291489155 step: 5430 | loss: 1.286670958 step: 5440 | loss: 1.281855829 step: 5450 | loss: 1.277043778 step: 5460 | loss: 1.272234815 step: 5470 | loss: 1.267428949 step: 5480 | loss: 1.262626191 step: 5490 | loss: 1.257826551 step: 5500 | loss: 1.253030042 step: 5510 | loss: 1.248236675 step: 5520 | loss: 1.243446460 step: 5530 | loss: 1.238659412 step: 5540 | loss: 1.233875542 step: 5550 | loss: 1.229094863 step: 5560 | loss: 1.224317389 step: 5570 | loss: 1.219543134 step: 5580 | loss: 1.214772110 step: 5590 | loss: 1.210004334 step: 5600 | loss: 1.205239818 step: 5610 | loss: 1.200478579 step: 5620 | loss: 1.195720632 step: 5630 | loss: 1.190965991 step: 5640 | loss: 1.186214674 step: 5650 | loss: 1.181466696 step: 5660 | loss: 1.176722074 step: 5670 | loss: 1.171980825 step: 5680 | loss: 1.167242966 step: 5690 | loss: 1.162508513 step: 5700 | loss: 1.157777486 step: 5710 | loss: 1.153049901 step: 5720 | loss: 1.148325777 step: 5730 | loss: 1.143605132 step: 5740 | loss: 1.138887986 step: 5750 | loss: 1.134174357 step: 5760 | loss: 1.129464264 step: 5770 | loss: 1.124757727 step: 5780 | loss: 1.120054766 step: 5790 | loss: 1.115355400 step: 5800 | loss: 1.110659650 step: 5810 | loss: 1.105967537 step: 5820 | loss: 1.101279080 step: 5830 | loss: 1.096594301 step: 5840 | loss: 1.091913220 step: 5850 | loss: 1.087235860 step: 5860 | loss: 1.082562241 step: 5870 | loss: 1.077892386 step: 5880 | loss: 1.073226315 step: 5890 | loss: 1.068564052 step: 5900 | loss: 1.063905619 step: 5910 | loss: 1.059251037 step: 5920 | loss: 1.054600329 step: 5930 | loss: 1.049953519 step: 5940 | loss: 1.045310629 step: 5950 | loss: 1.040671683 step: 5960 | loss: 1.036036702 step: 5970 | loss: 1.031405712 step: 5980 | loss: 1.026778736 step: 5990 | loss: 1.022155796 step: 6000 | loss: 1.017536917 step: 6010 | loss: 1.012922124 step: 6020 | loss: 1.008311439 step: 6030 | loss: 1.003704888 step: 6040 | loss: 0.999102494 step: 6050 | loss: 0.994504282 step: 6060 | loss: 0.989910277 step: 6070 | loss: 0.985320502 step: 6080 | loss: 0.980734984 step: 6090 | loss: 0.976153745 step: 6100 | loss: 0.971576813 step: 6110 | loss: 0.967004210 step: 6120 | loss: 0.962435963 step: 6130 | loss: 0.957872097 step: 6140 | loss: 0.953312636 step: 6150 | loss: 0.948757605 step: 6160 | loss: 0.944207031 step: 6170 | loss: 0.939660938 step: 6180 | loss: 0.935119351 step: 6190 | loss: 0.930582296 step: 6200 | loss: 0.926049798 step: 6210 | loss: 0.921521883 step: 6220 | loss: 0.916998575 step: 6230 | loss: 0.912479900 step: 6240 | loss: 0.907965884 step: 6250 | loss: 0.903456551 step: 6260 | loss: 0.898951926 step: 6270 | loss: 0.894452036 step: 6280 | loss: 0.889956905 step: 6290 | loss: 0.885466558 step: 6300 | loss: 0.880981020 step: 6310 | loss: 0.876500316 step: 6320 | loss: 0.872024471 step: 6330 | loss: 0.867553510 step: 6340 | loss: 0.863087457 step: 6350 | loss: 0.858626337 step: 6360 | loss: 0.854170173 step: 6370 | loss: 0.849718992 step: 6380 | loss: 0.845272815 step: 6390 | loss: 0.840831668 step: 6400 | loss: 0.836395574 step: 6410 | loss: 0.831964557 step: 6420 | loss: 0.827538639 step: 6430 | loss: 0.823117845 step: 6440 | loss: 0.818702197 step: 6450 | loss: 0.814291717 step: 
6460 | loss: 0.809886428 step: 6470 | loss: 0.805486353 step: 6480 | loss: 0.801091513 step: 6490 | loss: 0.796701929 step: 6500 | loss: 0.792317624 step: 6510 | loss: 0.787938617 step: 6520 | loss: 0.783564930 step: 6530 | loss: 0.779196582 step: 6540 | loss: 0.774833594 step: 6550 | loss: 0.770475985 step: 6560 | loss: 0.766123775 step: 6570 | loss: 0.761776981 step: 6580 | loss: 0.757435622 step: 6590 | loss: 0.753099715 step: 6600 | loss: 0.748769279 step: 6610 | loss: 0.744444330 step: 6620 | loss: 0.740124884 step: 6630 | loss: 0.735810957 step: 6640 | loss: 0.731502564 step: 6650 | loss: 0.727199719 step: 6660 | loss: 0.722902438 step: 6670 | loss: 0.718610733 step: 6680 | loss: 0.714324617 step: 6690 | loss: 0.710044103 step: 6700 | loss: 0.705769202 step: 6710 | loss: 0.701499924 step: 6720 | loss: 0.697236281 step: 6730 | loss: 0.692978281 step: 6740 | loss: 0.688725933 step: 6750 | loss: 0.684479245 step: 6760 | loss: 0.680238224 step: 6770 | loss: 0.676002877 step: 6780 | loss: 0.671773209 step: 6790 | loss: 0.667549225 step: 6800 | loss: 0.663330929 step: 6810 | loss: 0.659118323 step: 6820 | loss: 0.654911410 step: 6830 | loss: 0.650710190 step: 6840 | loss: 0.646514664 step: 6850 | loss: 0.642324831 step: 6860 | loss: 0.638140690 step: 6870 | loss: 0.633962236 step: 6880 | loss: 0.629789468 step: 6890 | loss: 0.625622379 step: 6900 | loss: 0.621460963 step: 6910 | loss: 0.617305214 step: 6920 | loss: 0.613155124 step: 6930 | loss: 0.609010682 step: 6940 | loss: 0.604871879 step: 6950 | loss: 0.600738704 step: 6960 | loss: 0.596611143 step: 6970 | loss: 0.592489182 step: 6980 | loss: 0.588372806 step: 6990 | loss: 0.584262000 step: 7000 | loss: 0.580156744 step: 7010 | loss: 0.576057021 step: 7020 | loss: 0.571962811 step: 7030 | loss: 0.567874091 step: 7040 | loss: 0.563790840 step: 7050 | loss: 0.559713032 step: 7060 | loss: 0.555640644 step: 7070 | loss: 0.551573648 step: 7080 | loss: 0.547512017 step: 7090 | loss: 0.543455721 step: 7100 | loss: 0.539404730 step: 7110 | loss: 0.535359012 step: 7120 | loss: 0.531318534 step: 7130 | loss: 0.527283261 step: 7140 | loss: 0.523253158 step: 7150 | loss: 0.519228187 step: 7160 | loss: 0.515208310 step: 7170 | loss: 0.511193487 step: 7180 | loss: 0.507183678 step: 7190 | loss: 0.503178839 step: 7200 | loss: 0.499178927 step: 7210 | loss: 0.495183898 step: 7220 | loss: 0.491193705 step: 7230 | loss: 0.487208300 step: 7240 | loss: 0.483227636 step: 7250 | loss: 0.479251663 step: 7260 | loss: 0.475280329 step: 7270 | loss: 0.471313582 step: 7280 | loss: 0.467351370 step: 7290 | loss: 0.463393638 step: 7300 | loss: 0.459440331 step: 7310 | loss: 0.455491392 step: 7320 | loss: 0.451546765 step: 7330 | loss: 0.447606390 step: 7340 | loss: 0.443670209 step: 7350 | loss: 0.439738161 step: 7360 | loss: 0.435810187 step: 7370 | loss: 0.431886223 step: 7380 | loss: 0.427966209 step: 7390 | loss: 0.424050080 step: 7400 | loss: 0.420137773 step: 7410 | loss: 0.416229224 step: 7420 | loss: 0.412324368 step: 7430 | loss: 0.408423140 step: 7440 | loss: 0.404525473 step: 7450 | loss: 0.400631303 step: 7460 | loss: 0.396740563 step: 7470 | loss: 0.392853185 step: 7480 | loss: 0.388969104 step: 7490 | loss: 0.385088253 step: 7500 | loss: 0.381210564 step: 7510 | loss: 0.377335972 step: 7520 | loss: 0.373464408 step: 7530 | loss: 0.369595806 step: 7540 | loss: 0.365730100 step: 7550 | loss: 0.361867224 step: 7560 | loss: 0.358007112 step: 7570 | loss: 0.354149699 step: 7580 | loss: 0.350294918 step: 7590 | loss: 0.346442707 step: 7600 | loss: 
0.342593001 step: 7610 | loss: 0.338745738 step: 7620 | loss: 0.334900853 step: 7630 | loss: 0.331058287 step: 7640 | loss: 0.327217978 step: 7650 | loss: 0.323379865 step: 7660 | loss: 0.319543891 step: 7670 | loss: 0.315709996 step: 7680 | loss: 0.311878125 step: 7690 | loss: 0.308048220 step: 7700 | loss: 0.304220228 step: 7710 | loss: 0.300394094 step: 7720 | loss: 0.296569766 step: 7730 | loss: 0.292747193 step: 7740 | loss: 0.288926325 step: 7750 | loss: 0.285107113 step: 7760 | loss: 0.281289511 step: 7770 | loss: 0.277473471 step: 7780 | loss: 0.273658950 step: 7790 | loss: 0.269845904 step: 7800 | loss: 0.266034291 step: 7810 | loss: 0.262224072 step: 7820 | loss: 0.258415206 step: 7830 | loss: 0.254607657 step: 7840 | loss: 0.250801388 step: 7850 | loss: 0.246996364 step: 7860 | loss: 0.243192551 step: 7870 | loss: 0.239389918 step: 7880 | loss: 0.235588434 step: 7890 | loss: 0.231788069 step: 7900 | loss: 0.227988795 step: 7910 | loss: 0.224190585 step: 7920 | loss: 0.220393414 step: 7930 | loss: 0.216597256 step: 7940 | loss: 0.212802089 step: 7950 | loss: 0.209007891 step: 7960 | loss: 0.205214640 step: 7970 | loss: 0.201422316 step: 7980 | loss: 0.197630900 step: 7990 | loss: 0.193840375 step: 8000 | loss: 0.190050723 step: 8010 | loss: 0.186261927 step: 8020 | loss: 0.182473973 step: 8030 | loss: 0.178686846 step: 8040 | loss: 0.174900531 step: 8050 | loss: 0.171115016 step: 8060 | loss: 0.167330287 step: 8070 | loss: 0.163546334 step: 8080 | loss: 0.159763145 step: 8090 | loss: 0.155980708 step: 8100 | loss: 0.152199013 step: 8110 | loss: 0.148418051 step: 8120 | loss: 0.144637812 step: 8130 | loss: 0.140858287 step: 8140 | loss: 0.137079466 step: 8150 | loss: 0.133301341 step: 8160 | loss: 0.129523905 step: 8170 | loss: 0.125747149 step: 8180 | loss: 0.121971065 step: 8190 | loss: 0.118195646 step: 8200 | loss: 0.114420884 step: 8210 | loss: 0.110646772 step: 8220 | loss: 0.106873304 step: 8230 | loss: 0.103100471 step: 8240 | loss: 0.099328268 step: 8250 | loss: 0.095556688 step: 8260 | loss: 0.091785723 step: 8270 | loss: 0.088015369 step: 8280 | loss: 0.084245617 step: 8290 | loss: 0.080476462 step: 8300 | loss: 0.076707898 step: 8310 | loss: 0.072939919 step: 8320 | loss: 0.069172518 step: 8330 | loss: 0.065405689 step: 8340 | loss: 0.061639426 step: 8350 | loss: 0.057873724 step: 8360 | loss: 0.054108577 step: 8370 | loss: 0.050343978 step: 8380 | loss: 0.046579923 step: 8390 | loss: 0.042816405 step: 8400 | loss: 0.039053419 step: 8410 | loss: 0.035290960 step: 8420 | loss: 0.031529022 step: 8430 | loss: 0.027767599 step: 8440 | loss: 0.024006686 step: 8450 | loss: 0.020246279 step: 8460 | loss: 0.016486371 step: 8470 | loss: 0.012726957 step: 8480 | loss: 0.008968033 step: 8490 | loss: 0.005209594 step: 8500 | loss: 0.001451634 - final loss: 0.000700 - (cd _build/default/examples/opt && ./rmsprop.exe) - step: 0 | loss: 6.044265907 step: 10 | loss: 6.036899674 step: 20 | loss: 6.031681629 step: 30 | loss: 6.026916813 step: 40 | loss: 6.022283375 step: 50 | loss: 6.017694433 step: 60 | loss: 6.013122039 step: 70 | loss: 6.008556709 step: 80 | loss: 6.003995168 step: 90 | loss: 5.999436279 step: 100 | loss: 5.994879648 step: 110 | loss: 5.990325135 step: 120 | loss: 5.985772694 step: 130 | loss: 5.981222307 step: 140 | loss: 5.976673967 step: 150 | loss: 5.972127669 step: 160 | loss: 5.967583409 step: 170 | loss: 5.963041183 step: 180 | loss: 5.958500983 step: 190 | loss: 5.953962800 step: 200 | loss: 5.949426621 step: 210 | loss: 5.944892429 step: 220 | loss: 
5.940360199 step: 230 | loss: 5.935829897 step: 240 | loss: 5.931301476 step: 250 | loss: 5.926774862 step: 260 | loss: 5.922249949 step: 270 | loss: 5.917726574 step: 280 | loss: 5.913204529 step: 290 | loss: 5.908683628 step: 300 | loss: 5.904163800 step: 310 | loss: 5.899645039 step: 320 | loss: 5.895127348 step: 330 | loss: 5.890610728 step: 340 | loss: 5.886095183 step: 350 | loss: 5.881580715 step: 360 | loss: 5.877067327 step: 370 | loss: 5.872555020 step: 380 | loss: 5.868043798 step: 390 | loss: 5.863533663 step: 400 | loss: 5.859024617 step: 410 | loss: 5.854516664 step: 420 | loss: 5.850009804 step: 430 | loss: 5.845504042 step: 440 | loss: 5.840999379 step: 450 | loss: 5.836495818 step: 460 | loss: 5.831993362 step: 470 | loss: 5.827492013 step: 480 | loss: 5.822991774 step: 490 | loss: 5.818492647 step: 500 | loss: 5.813994636 step: 510 | loss: 5.809497741 step: 520 | loss: 5.805001967 step: 530 | loss: 5.800507315 step: 540 | loss: 5.796013789 step: 550 | loss: 5.791521390 step: 560 | loss: 5.787030122 step: 570 | loss: 5.782539988 step: 580 | loss: 5.778050989 step: 590 | loss: 5.773563128 step: 600 | loss: 5.769076409 step: 610 | loss: 5.764590833 step: 620 | loss: 5.760106404 step: 630 | loss: 5.755623124 step: 640 | loss: 5.751140996 step: 650 | loss: 5.746660022 step: 660 | loss: 5.742180206 step: 670 | loss: 5.737701549 step: 680 | loss: 5.733224055 step: 690 | loss: 5.728747726 step: 700 | loss: 5.724272565 step: 710 | loss: 5.719798576 step: 720 | loss: 5.715325759 step: 730 | loss: 5.710854119 step: 740 | loss: 5.706383658 step: 750 | loss: 5.701914379 step: 760 | loss: 5.697446285 step: 770 | loss: 5.692979378 step: 780 | loss: 5.688513661 step: 790 | loss: 5.684049137 step: 800 | loss: 5.679585809 step: 810 | loss: 5.675123680 step: 820 | loss: 5.670662752 step: 830 | loss: 5.666203028 step: 840 | loss: 5.661744512 step: 850 | loss: 5.657287205 step: 860 | loss: 5.652831111 step: 870 | loss: 5.648376233 step: 880 | loss: 5.643922574 step: 890 | loss: 5.639470136 step: 900 | loss: 5.635018922 step: 910 | loss: 5.630568936 step: 920 | loss: 5.626120180 step: 930 | loss: 5.621672657 step: 940 | loss: 5.617226370 step: 950 | loss: 5.612781322 step: 960 | loss: 5.608337515 step: 970 | loss: 5.603894954 step: 980 | loss: 5.599453640 step: 990 | loss: 5.595013578 step: 1000 | loss: 5.590574769 step: 1010 | loss: 5.586137216 step: 1020 | loss: 5.581700924 step: 1030 | loss: 5.577265894 step: 1040 | loss: 5.572832129 step: 1050 | loss: 5.568399634 step: 1060 | loss: 5.563968410 step: 1070 | loss: 5.559538462 step: 1080 | loss: 5.555109791 step: 1090 | loss: 5.550682401 step: 1100 | loss: 5.546256294 step: 1110 | loss: 5.541831475 step: 1120 | loss: 5.537407947 step: 1130 | loss: 5.532985711 step: 1140 | loss: 5.528564772 step: 1150 | loss: 5.524145132 step: 1160 | loss: 5.519726794 step: 1170 | loss: 5.515309763 step: 1180 | loss: 5.510894040 step: 1190 | loss: 5.506479629 step: 1200 | loss: 5.502066533 step: 1210 | loss: 5.497654756 step: 1220 | loss: 5.493244300 step: 1230 | loss: 5.488835168 step: 1240 | loss: 5.484427365 step: 1250 | loss: 5.480020892 step: 1260 | loss: 5.475615754 step: 1270 | loss: 5.471211953 step: 1280 | loss: 5.466809493 step: 1290 | loss: 5.462408376 step: 1300 | loss: 5.458008607 step: 1310 | loss: 5.453610188 step: 1320 | loss: 5.449213123 step: 1330 | loss: 5.444817415 step: 1340 | loss: 5.440423067 step: 1350 | loss: 5.436030083 step: 1360 | loss: 5.431638465 step: 1370 | loss: 5.427248218 step: 1380 | loss: 5.422859344 step: 1390 | loss: 
5.418471847 step: 1400 | loss: 5.414085730 step: 1410 | loss: 5.409700996 step: 1420 | loss: 5.405317650 step: 1430 | loss: 5.400935693 step: 1440 | loss: 5.396555131 step: 1450 | loss: 5.392175965 step: 1460 | loss: 5.387798200 step: 1470 | loss: 5.383421838 step: 1480 | loss: 5.379046884 step: 1490 | loss: 5.374673340 step: 1500 | loss: 5.370301211 step: 1510 | loss: 5.365930499 step: 1520 | loss: 5.361561208 step: 1530 | loss: 5.357193341 step: 1540 | loss: 5.352826903 step: 1550 | loss: 5.348461896 step: 1560 | loss: 5.344098324 step: 1570 | loss: 5.339736190 step: 1580 | loss: 5.335375499 step: 1590 | loss: 5.331016253 step: 1600 | loss: 5.326658456 step: 1610 | loss: 5.322302112 step: 1620 | loss: 5.317947224 step: 1630 | loss: 5.313593796 step: 1640 | loss: 5.309241831 step: 1650 | loss: 5.304891333 step: 1660 | loss: 5.300542305 step: 1670 | loss: 5.296194752 step: 1680 | loss: 5.291848677 step: 1690 | loss: 5.287504083 step: 1700 | loss: 5.283160974 step: 1710 | loss: 5.278819354 step: 1720 | loss: 5.274479226 step: 1730 | loss: 5.270140595 step: 1740 | loss: 5.265803463 step: 1750 | loss: 5.261467835 step: 1760 | loss: 5.257133714 step: 1770 | loss: 5.252801103 step: 1780 | loss: 5.248470008 step: 1790 | loss: 5.244140431 step: 1800 | loss: 5.239812376 step: 1810 | loss: 5.235485847 step: 1820 | loss: 5.231160848 step: 1830 | loss: 5.226837383 step: 1840 | loss: 5.222515454 step: 1850 | loss: 5.218195067 step: 1860 | loss: 5.213876225 step: 1870 | loss: 5.209558931 step: 1880 | loss: 5.205243191 step: 1890 | loss: 5.200929006 step: 1900 | loss: 5.196616382 step: 1910 | loss: 5.192305323 step: 1920 | loss: 5.187995831 step: 1930 | loss: 5.183687912 step: 1940 | loss: 5.179381568 step: 1950 | loss: 5.175076805 step: 1960 | loss: 5.170773625 step: 1970 | loss: 5.166472033 step: 1980 | loss: 5.162172033 step: 1990 | loss: 5.157873628 step: 2000 | loss: 5.153576823 step: 2010 | loss: 5.149281622 step: 2020 | loss: 5.144988029 step: 2030 | loss: 5.140696047 step: 2040 | loss: 5.136405681 step: 2050 | loss: 5.132116935 step: 2060 | loss: 5.127829812 step: 2070 | loss: 5.123544318 step: 2080 | loss: 5.119260456 step: 2090 | loss: 5.114978229 step: 2100 | loss: 5.110697643 step: 2110 | loss: 5.106418701 step: 2120 | loss: 5.102141408 step: 2130 | loss: 5.097865767 step: 2140 | loss: 5.093591784 step: 2150 | loss: 5.089319461 step: 2160 | loss: 5.085048803 step: 2170 | loss: 5.080779814 step: 2180 | loss: 5.076512499 step: 2190 | loss: 5.072246861 step: 2200 | loss: 5.067982906 step: 2210 | loss: 5.063720636 step: 2220 | loss: 5.059460057 step: 2230 | loss: 5.055201173 step: 2240 | loss: 5.050943988 step: 2250 | loss: 5.046688506 step: 2260 | loss: 5.042434731 step: 2270 | loss: 5.038182669 step: 2280 | loss: 5.033932322 step: 2290 | loss: 5.029683696 step: 2300 | loss: 5.025436795 step: 2310 | loss: 5.021191624 step: 2320 | loss: 5.016948185 step: 2330 | loss: 5.012706485 step: 2340 | loss: 5.008466528 step: 2350 | loss: 5.004228317 step: 2360 | loss: 4.999991858 step: 2370 | loss: 4.995757154 step: 2380 | loss: 4.991524211 step: 2390 | loss: 4.987293032 step: 2400 | loss: 4.983063622 step: 2410 | loss: 4.978835986 step: 2420 | loss: 4.974610128 step: 2430 | loss: 4.970386053 step: 2440 | loss: 4.966163765 step: 2450 | loss: 4.961943269 step: 2460 | loss: 4.957724570 step: 2470 | loss: 4.953507671 step: 2480 | loss: 4.949292577 step: 2490 | loss: 4.945079294 step: 2500 | loss: 4.940867826 step: 2510 | loss: 4.936658177 step: 2520 | loss: 4.932450352 step: 2530 | loss: 4.928244356 step: 
2540 | loss: 4.924040193 step: 2550 | loss: 4.919837868 step: 2560 | loss: 4.915637386 step: 2570 | loss: 4.911438751 step: 2580 | loss: 4.907241969 step: 2590 | loss: 4.903047043 step: 2600 | loss: 4.898853980 step: 2610 | loss: 4.894662782 step: 2620 | loss: 4.890473456 step: 2630 | loss: 4.886286006 step: 2640 | loss: 4.882100437 step: 2650 | loss: 4.877916754 step: 2660 | loss: 4.873734961 step: 2670 | loss: 4.869555064 step: 2680 | loss: 4.865377067 step: 2690 | loss: 4.861200975 step: 2700 | loss: 4.857026793 step: 2710 | loss: 4.852854526 step: 2720 | loss: 4.848684180 step: 2730 | loss: 4.844515758 step: 2740 | loss: 4.840349265 step: 2750 | loss: 4.836184708 step: 2760 | loss: 4.832022091 step: 2770 | loss: 4.827861418 step: 2780 | loss: 4.823702695 step: 2790 | loss: 4.819545927 step: 2800 | loss: 4.815391119 step: 2810 | loss: 4.811238275 step: 2820 | loss: 4.807087402 step: 2830 | loss: 4.802938504 step: 2840 | loss: 4.798791587 step: 2850 | loss: 4.794646654 step: 2860 | loss: 4.790503712 step: 2870 | loss: 4.786362766 step: 2880 | loss: 4.782223821 step: 2890 | loss: 4.778086882 step: 2900 | loss: 4.773951953 step: 2910 | loss: 4.769819042 step: 2920 | loss: 4.765688152 step: 2930 | loss: 4.761559289 step: 2940 | loss: 4.757432458 step: 2950 | loss: 4.753307664 step: 2960 | loss: 4.749184914 step: 2970 | loss: 4.745064211 step: 2980 | loss: 4.740945562 step: 2990 | loss: 4.736828971 step: 3000 | loss: 4.732714445 step: 3010 | loss: 4.728601988 step: 3020 | loss: 4.724491606 step: 3030 | loss: 4.720383304 step: 3040 | loss: 4.716277088 step: 3050 | loss: 4.712172963 step: 3060 | loss: 4.708070934 step: 3070 | loss: 4.703971008 step: 3080 | loss: 4.699873189 step: 3090 | loss: 4.695777483 step: 3100 | loss: 4.691683895 step: 3110 | loss: 4.687592432 step: 3120 | loss: 4.683503098 step: 3130 | loss: 4.679415899 step: 3140 | loss: 4.675330841 step: 3150 | loss: 4.671247930 step: 3160 | loss: 4.667167170 step: 3170 | loss: 4.663088568 step: 3180 | loss: 4.659012129 step: 3190 | loss: 4.654937860 step: 3200 | loss: 4.650865764 step: 3210 | loss: 4.646795849 step: 3220 | loss: 4.642728120 step: 3230 | loss: 4.638662583 step: 3240 | loss: 4.634599243 step: 3250 | loss: 4.630538107 step: 3260 | loss: 4.626479179 step: 3270 | loss: 4.622422467 step: 3280 | loss: 4.618367975 step: 3290 | loss: 4.614315709 step: 3300 | loss: 4.610265676 step: 3310 | loss: 4.606217881 step: 3320 | loss: 4.602172330 step: 3330 | loss: 4.598129030 step: 3340 | loss: 4.594087985 step: 3350 | loss: 4.590049201 step: 3360 | loss: 4.586012686 step: 3370 | loss: 4.581978445 step: 3380 | loss: 4.577946483 step: 3390 | loss: 4.573916807 step: 3400 | loss: 4.569889422 step: 3410 | loss: 4.565864336 step: 3420 | loss: 4.561841553 step: 3430 | loss: 4.557821081 step: 3440 | loss: 4.553802924 step: 3450 | loss: 4.549787090 step: 3460 | loss: 4.545773584 step: 3470 | loss: 4.541762413 step: 3480 | loss: 4.537753582 step: 3490 | loss: 4.533747098 step: 3500 | loss: 4.529742967 step: 3510 | loss: 4.525741195 step: 3520 | loss: 4.521741788 step: 3530 | loss: 4.517744754 step: 3540 | loss: 4.513750097 step: 3550 | loss: 4.509757825 step: 3560 | loss: 4.505767943 step: 3570 | loss: 4.501780458 step: 3580 | loss: 4.497795377 step: 3590 | loss: 4.493812705 step: 3600 | loss: 4.489832449 step: 3610 | loss: 4.485854616 step: 3620 | loss: 4.481879212 step: 3630 | loss: 4.477906243 step: 3640 | loss: 4.473935715 step: 3650 | loss: 4.469967636 step: 3660 | loss: 4.466002012 step: 3670 | loss: 4.462038849 step: 3680 | loss: 
4.458078154 step: 3690 | loss: 4.454119933 step: 3700 | loss: 4.450164193 step: 3710 | loss: 4.446210941 step: 3720 | loss: 4.442260182 step: 3730 | loss: 4.438311925 step: 3740 | loss: 4.434366175 step: 3750 | loss: 4.430422939 step: 3760 | loss: 4.426482224 step: 3770 | loss: 4.422544036 step: 3780 | loss: 4.418608382 step: 3790 | loss: 4.414675270 step: 3800 | loss: 4.410744705 step: 3810 | loss: 4.406816695 step: 3820 | loss: 4.402891246 step: 3830 | loss: 4.398968365 step: 3840 | loss: 4.395048059 step: 3850 | loss: 4.391130335 step: 3860 | loss: 4.387215200 step: 3870 | loss: 4.383302660 step: 3880 | loss: 4.379392723 step: 3890 | loss: 4.375485396 step: 3900 | loss: 4.371580685 step: 3910 | loss: 4.367678597 step: 3920 | loss: 4.363779140 step: 3930 | loss: 4.359882320 step: 3940 | loss: 4.355988145 step: 3950 | loss: 4.352096621 step: 3960 | loss: 4.348207756 step: 3970 | loss: 4.344321557 step: 3980 | loss: 4.340438031 step: 3990 | loss: 4.336557185 step: 4000 | loss: 4.332679026 step: 4010 | loss: 4.328803561 step: 4020 | loss: 4.324930798 step: 4030 | loss: 4.321060743 step: 4040 | loss: 4.317193405 step: 4050 | loss: 4.313328790 step: 4060 | loss: 4.309466906 step: 4070 | loss: 4.305607759 step: 4080 | loss: 4.301751358 step: 4090 | loss: 4.297897709 step: 4100 | loss: 4.294046821 step: 4110 | loss: 4.290198699 step: 4120 | loss: 4.286353353 step: 4130 | loss: 4.282510788 step: 4140 | loss: 4.278671014 step: 4150 | loss: 4.274834036 step: 4160 | loss: 4.270999863 step: 4170 | loss: 4.267168502 step: 4180 | loss: 4.263339961 step: 4190 | loss: 4.259514247 step: 4200 | loss: 4.255691368 step: 4210 | loss: 4.251871331 step: 4220 | loss: 4.248054144 step: 4230 | loss: 4.244239815 step: 4240 | loss: 4.240428351 step: 4250 | loss: 4.236619760 step: 4260 | loss: 4.232814050 step: 4270 | loss: 4.229011228 step: 4280 | loss: 4.225211302 step: 4290 | loss: 4.221414281 step: 4300 | loss: 4.217620171 step: 4310 | loss: 4.213828981 step: 4320 | loss: 4.210040718 step: 4330 | loss: 4.206255390 step: 4340 | loss: 4.202473006 step: 4350 | loss: 4.198693572 step: 4360 | loss: 4.194917097 step: 4370 | loss: 4.191143590 step: 4380 | loss: 4.187373057 step: 4390 | loss: 4.183605507 step: 4400 | loss: 4.179840948 step: 4410 | loss: 4.176079388 step: 4420 | loss: 4.172320835 step: 4430 | loss: 4.168565296 step: 4440 | loss: 4.164812781 step: 4450 | loss: 4.161063297 step: 4460 | loss: 4.157316853 step: 4470 | loss: 4.153573456 step: 4480 | loss: 4.149833115 step: 4490 | loss: 4.146095837 step: 4500 | loss: 4.142361632 step: 4510 | loss: 4.138630507 step: 4520 | loss: 4.134902471 step: 4530 | loss: 4.131177532 step: 4540 | loss: 4.127455699 step: 4550 | loss: 4.123736978 step: 4560 | loss: 4.120021380 step: 4570 | loss: 4.116308913 step: 4580 | loss: 4.112599584 step: 4590 | loss: 4.108893402 step: 4600 | loss: 4.105190376 step: 4610 | loss: 4.101490514 step: 4620 | loss: 4.097793825 step: 4630 | loss: 4.094100317 step: 4640 | loss: 4.090409998 step: 4650 | loss: 4.086722878 step: 4660 | loss: 4.083038965 step: 4670 | loss: 4.079358268 step: 4680 | loss: 4.075680794 step: 4690 | loss: 4.072006553 step: 4700 | loss: 4.068335554 step: 4710 | loss: 4.064667805 step: 4720 | loss: 4.061003315 step: 4730 | loss: 4.057342092 step: 4740 | loss: 4.053684146 step: 4750 | loss: 4.050029485 step: 4760 | loss: 4.046378118 step: 4770 | loss: 4.042730054 step: 4780 | loss: 4.039085301 step: 4790 | loss: 4.035443870 step: 4800 | loss: 4.031805767 step: 4810 | loss: 4.028171003 step: 4820 | loss: 4.024539587 step: 
4830 | loss: 4.020911527 step: 4840 | loss: 4.017286832 step: 4850 | loss: 4.013665512 step: 4860 | loss: 4.010047575 step: 4870 | loss: 4.006433031 step: 4880 | loss: 4.002821888 step: 4890 | loss: 3.999214156 step: 4900 | loss: 3.995609844 step: 4910 | loss: 3.992008961 step: 4920 | loss: 3.988411516 step: 4930 | loss: 3.984817519 step: 4940 | loss: 3.981226978 step: 4950 | loss: 3.977639903 step: 4960 | loss: 3.974056303 step: 4970 | loss: 3.970476188 step: 4980 | loss: 3.966899566 step: 4990 | loss: 3.963326448 step: 5000 | loss: 3.959756842 step: 5010 | loss: 3.956190758 step: 5020 | loss: 3.952628206 step: 5030 | loss: 3.949069194 step: 5040 | loss: 3.945513732 step: 5050 | loss: 3.941961830 step: 5060 | loss: 3.938413498 step: 5070 | loss: 3.934868744 step: 5080 | loss: 3.931327578 step: 5090 | loss: 3.927790010 step: 5100 | loss: 3.924256050 step: 5110 | loss: 3.920725707 step: 5120 | loss: 3.917198990 step: 5130 | loss: 3.913675910 step: 5140 | loss: 3.910156476 step: 5150 | loss: 3.906640698 step: 5160 | loss: 3.903128585 step: 5170 | loss: 3.899620148 step: 5180 | loss: 3.896115396 step: 5190 | loss: 3.892614339 step: 5200 | loss: 3.889116986 step: 5210 | loss: 3.885623348 step: 5220 | loss: 3.882133435 step: 5230 | loss: 3.878647255 step: 5240 | loss: 3.875164820 step: 5250 | loss: 3.871686140 step: 5260 | loss: 3.868211223 step: 5270 | loss: 3.864740081 step: 5280 | loss: 3.861272723 step: 5290 | loss: 3.857809159 step: 5300 | loss: 3.854349399 step: 5310 | loss: 3.850893454 step: 5320 | loss: 3.847441333 step: 5330 | loss: 3.843993046 step: 5340 | loss: 3.840548605 step: 5350 | loss: 3.837108018 step: 5360 | loss: 3.833671296 step: 5370 | loss: 3.830238450 step: 5380 | loss: 3.826809489 step: 5390 | loss: 3.823384424 step: 5400 | loss: 3.819963264 step: 5410 | loss: 3.816546021 step: 5420 | loss: 3.813132705 step: 5430 | loss: 3.809723326 step: 5440 | loss: 3.806317894 step: 5450 | loss: 3.802916420 step: 5460 | loss: 3.799518914 step: 5470 | loss: 3.796125387 step: 5480 | loss: 3.792735848 step: 5490 | loss: 3.789350309 step: 5500 | loss: 3.785968780 step: 5510 | loss: 3.782591272 step: 5520 | loss: 3.779217794 step: 5530 | loss: 3.775848358 step: 5540 | loss: 3.772482974 step: 5550 | loss: 3.769121653 step: 5560 | loss: 3.765764405 step: 5570 | loss: 3.762411242 step: 5580 | loss: 3.759062172 step: 5590 | loss: 3.755717209 step: 5600 | loss: 3.752376361 step: 5610 | loss: 3.749039639 step: 5620 | loss: 3.745707056 step: 5630 | loss: 3.742378620 step: 5640 | loss: 3.739054343 step: 5650 | loss: 3.735734236 step: 5660 | loss: 3.732418310 step: 5670 | loss: 3.729106575 step: 5680 | loss: 3.725799042 step: 5690 | loss: 3.722495722 step: 5700 | loss: 3.719196626 step: 5710 | loss: 3.715901765 step: 5720 | loss: 3.712611149 step: 5730 | loss: 3.709324791 step: 5740 | loss: 3.706042699 step: 5750 | loss: 3.702764887 step: 5760 | loss: 3.699491364 step: 5770 | loss: 3.696222141 step: 5780 | loss: 3.692957230 step: 5790 | loss: 3.689696642 step: 5800 | loss: 3.686440387 step: 5810 | loss: 3.683188477 step: 5820 | loss: 3.679940922 step: 5830 | loss: 3.676697735 step: 5840 | loss: 3.673458925 step: 5850 | loss: 3.670224504 step: 5860 | loss: 3.666994483 step: 5870 | loss: 3.663768874 step: 5880 | loss: 3.660547687 step: 5890 | loss: 3.657330933 step: 5900 | loss: 3.654118624 step: 5910 | loss: 3.650910771 step: 5920 | loss: 3.647707385 step: 5930 | loss: 3.644508477 step: 5940 | loss: 3.641314059 step: 5950 | loss: 3.638124141 step: 5960 | loss: 3.634938735 step: 5970 | loss: 
3.631757852 step: 5980 | loss: 3.628581503 step: 5990 | loss: 3.625409700 step: 6000 | loss: 3.622242453 step: 6010 | loss: 3.619079775 step: 6020 | loss: 3.615921675 step: 6030 | loss: 3.612768166 step: 6040 | loss: 3.609619258 step: 6050 | loss: 3.606474964 step: 6060 | loss: 3.603335293 step: 6070 | loss: 3.600200257 step: 6080 | loss: 3.597069868 step: 6090 | loss: 3.593944137 step: 6100 | loss: 3.590823075 step: 6110 | loss: 3.587706692 step: 6120 | loss: 3.584595001 step: 6130 | loss: 3.581488012 step: 6140 | loss: 3.578385736 step: 6150 | loss: 3.575288185 step: 6160 | loss: 3.572195370 step: 6170 | loss: 3.569107301 step: 6180 | loss: 3.566023990 step: 6190 | loss: 3.562945448 step: 6200 | loss: 3.559871686 step: 6210 | loss: 3.556802714 step: 6220 | loss: 3.553738544 step: 6230 | loss: 3.550679187 step: 6240 | loss: 3.547624652 step: 6250 | loss: 3.544574952 step: 6260 | loss: 3.541530096 step: 6270 | loss: 3.538490095 step: 6280 | loss: 3.535454960 step: 6290 | loss: 3.532424701 step: 6300 | loss: 3.529399329 step: 6310 | loss: 3.526378854 step: 6320 | loss: 3.523363286 step: 6330 | loss: 3.520352635 step: 6340 | loss: 3.517346911 step: 6350 | loss: 3.514346124 step: 6360 | loss: 3.511350284 step: 6370 | loss: 3.508359399 step: 6380 | loss: 3.505373480 step: 6390 | loss: 3.502392535 step: 6400 | loss: 3.499416573 step: 6410 | loss: 3.496445603 step: 6420 | loss: 3.493479633 step: 6430 | loss: 3.490518670 step: 6440 | loss: 3.487562723 step: 6450 | loss: 3.484611799 step: 6460 | loss: 3.481665904 step: 6470 | loss: 3.478725045 step: 6480 | loss: 3.475789226 step: 6490 | loss: 3.472858454 step: 6500 | loss: 3.469932731 step: 6510 | loss: 3.467012060 step: 6520 | loss: 3.464096444 step: 6530 | loss: 3.461185883 step: 6540 | loss: 3.458280376 step: 6550 | loss: 3.455379919 step: 6560 | loss: 3.452484507 step: 6570 | loss: 3.449594130 step: 6580 | loss: 3.446708776 step: 6590 | loss: 3.443828427 step: 6600 | loss: 3.440953057 step: 6610 | loss: 3.438082631 step: 6620 | loss: 3.435217101 step: 6630 | loss: 3.432356398 step: 6640 | loss: 3.429500424 step: 6650 | loss: 3.426649034 step: 6660 | loss: 3.423802010 step: 6670 | loss: 3.420959036 step: 6680 | loss: 3.418119727 step: 6690 | loss: 3.415283797 step: 6700 | loss: 3.412451148 step: 6710 | loss: 3.409621753 step: 6720 | loss: 3.406795580 step: 6730 | loss: 3.403972587 step: 6740 | loss: 3.401152713 step: 6750 | loss: 3.398335870 step: 6760 | loss: 3.395521931 step: 6770 | loss: 3.392710701 step: 6780 | loss: 3.389901893 step: 6790 | loss: 3.387095138 step: 6800 | loss: 3.384290123 step: 6810 | loss: 3.381486732 step: 6820 | loss: 3.378684960 step: 6830 | loss: 3.375884810 step: 6840 | loss: 3.373086285 step: 6850 | loss: 3.370289390 step: 6860 | loss: 3.367494129 step: 6870 | loss: 3.364700504 step: 6880 | loss: 3.361908522 step: 6890 | loss: 3.359118184 step: 6900 | loss: 3.356329496 step: 6910 | loss: 3.353542462 step: 6920 | loss: 3.350757084 step: 6930 | loss: 3.347973368 step: 6940 | loss: 3.345191317 step: 6950 | loss: 3.342410935 step: 6960 | loss: 3.339632226 step: 6970 | loss: 3.336855195 step: 6980 | loss: 3.334079845 step: 6990 | loss: 3.331306180 step: 7000 | loss: 3.328534204 step: 7010 | loss: 3.325763922 step: 7020 | loss: 3.322995337 step: 7030 | loss: 3.320228454 step: 7040 | loss: 3.317463276 step: 7050 | loss: 3.314699807 step: 7060 | loss: 3.311938053 step: 7070 | loss: 3.309178016 step: 7080 | loss: 3.306419700 step: 7090 | loss: 3.303663111 step: 7100 | loss: 3.300908252 step: 7110 | loss: 3.298155127 step: 
7120 | loss: 3.295403740 step: 7130 | loss: 3.292654095 step: 7140 | loss: 3.289906197 step: 7150 | loss: 3.287160050 step: 7160 | loss: 3.284415657 step: 7170 | loss: 3.281673023 step: 7180 | loss: 3.278932152 step: 7190 | loss: 3.276193048 step: 7200 | loss: 3.273455716 step: 7210 | loss: 3.270720159 step: 7220 | loss: 3.267986381 step: 7230 | loss: 3.265254387 step: 7240 | loss: 3.262524182 step: 7250 | loss: 3.259795768 step: 7260 | loss: 3.257069150 step: 7270 | loss: 3.254344333 step: 7280 | loss: 3.251621321 step: 7290 | loss: 3.248900117 step: 7300 | loss: 3.246180726 step: 7310 | loss: 3.243463152 step: 7320 | loss: 3.240747400 step: 7330 | loss: 3.238033473 step: 7340 | loss: 3.235321375 step: 7350 | loss: 3.232611111 step: 7360 | loss: 3.229902685 step: 7370 | loss: 3.227196102 step: 7380 | loss: 3.224491364 step: 7390 | loss: 3.221788477 step: 7400 | loss: 3.219087444 step: 7410 | loss: 3.216388270 step: 7420 | loss: 3.213690958 step: 7430 | loss: 3.210995514 step: 7440 | loss: 3.208301940 step: 7450 | loss: 3.205610241 step: 7460 | loss: 3.202920422 step: 7470 | loss: 3.200232486 step: 7480 | loss: 3.197546437 step: 7490 | loss: 3.194862279 step: 7500 | loss: 3.192180017 step: 7510 | loss: 3.189499654 step: 7520 | loss: 3.186821194 step: 7530 | loss: 3.184144641 step: 7540 | loss: 3.181470000 step: 7550 | loss: 3.178797273 step: 7560 | loss: 3.176126465 step: 7570 | loss: 3.173457580 step: 7580 | loss: 3.170790622 step: 7590 | loss: 3.168125594 step: 7600 | loss: 3.165462499 step: 7610 | loss: 3.162801342 step: 7620 | loss: 3.160142127 step: 7630 | loss: 3.157484855 step: 7640 | loss: 3.154829532 step: 7650 | loss: 3.152176161 step: 7660 | loss: 3.149524744 step: 7670 | loss: 3.146875285 step: 7680 | loss: 3.144227787 step: 7690 | loss: 3.141582253 step: 7700 | loss: 3.138938687 step: 7710 | loss: 3.136297090 step: 7720 | loss: 3.133657465 step: 7730 | loss: 3.131019815 step: 7740 | loss: 3.128384141 step: 7750 | loss: 3.125750447 step: 7760 | loss: 3.123118734 step: 7770 | loss: 3.120489002 step: 7780 | loss: 3.117861255 step: 7790 | loss: 3.115235491 step: 7800 | loss: 3.112611713 step: 7810 | loss: 3.109989920 step: 7820 | loss: 3.107370111 step: 7830 | loss: 3.104752285 step: 7840 | loss: 3.102136441 step: 7850 | loss: 3.099522576 step: 7860 | loss: 3.096910686 step: 7870 | loss: 3.094300767 step: 7880 | loss: 3.091692811 step: 7890 | loss: 3.089086812 step: 7900 | loss: 3.086482759 step: 7910 | loss: 3.083880639 step: 7920 | loss: 3.081280435 step: 7930 | loss: 3.078682125 step: 7940 | loss: 3.076085682 step: 7950 | loss: 3.073491070 step: 7960 | loss: 3.070898240 step: 7970 | loss: 3.068307126 step: 7980 | loss: 3.065717633 step: 7990 | loss: 3.063129628 step: 8000 | loss: 3.060542909 step: 8010 | loss: 3.057957178 step: 8020 | loss: 3.055372033 step: 8030 | loss: 3.052787090 step: 8040 | loss: 3.050202171 step: 8050 | loss: 3.047617252 step: 8060 | loss: 3.045032334 step: 8070 | loss: 3.042447415 step: 8080 | loss: 3.039862497 step: 8090 | loss: 3.037277578 step: 8100 | loss: 3.034692660 step: 8110 | loss: 3.032107741 step: 8120 | loss: 3.029522823 step: 8130 | loss: 3.026937905 step: 8140 | loss: 3.024352986 step: 8150 | loss: 3.021768068 step: 8160 | loss: 3.019183149 step: 8170 | loss: 3.016598231 step: 8180 | loss: 3.014013312 step: 8190 | loss: 3.011428394 step: 8200 | loss: 3.008843475 step: 8210 | loss: 3.006258557 step: 8220 | loss: 3.003673639 step: 8230 | loss: 3.001088720 step: 8240 | loss: 2.998503802 step: 8250 | loss: 2.995918883 step: 8260 | loss: 
2.993333965 step: 8270 | loss: 2.990749046 step: 8280 | loss: 2.988164128 step: 8290 | loss: 2.985579210 step: 8300 | loss: 2.982994291 step: 8310 | loss: 2.980409373 step: 8320 | loss: 2.977824454 step: 8330 | loss: 2.975239536 step: 8340 | loss: 2.972654617 step: 8350 | loss: 2.970069699 step: 8360 | loss: 2.967484780 step: 8370 | loss: 2.964899862 step: 8380 | loss: 2.962314944 step: 8390 | loss: 2.959730025 step: 8400 | loss: 2.957145107 step: 8410 | loss: 2.954560188 step: 8420 | loss: 2.951975270 step: 8430 | loss: 2.949390351 step: 8440 | loss: 2.946805433 step: 8450 | loss: 2.944220515 step: 8460 | loss: 2.941635596 step: 8470 | loss: 2.939050678 step: 8480 | loss: 2.936465759 step: 8490 | loss: 2.933880841 step: 8500 | loss: 2.931295922 step: 8510 | loss: 2.928711004 step: 8520 | loss: 2.926126085 step: 8530 | loss: 2.923541167 step: 8540 | loss: 2.920956249 step: 8550 | loss: 2.918371330 step: 8560 | loss: 2.915786412 step: 8570 | loss: 2.913201493 step: 8580 | loss: 2.910616575 step: 8590 | loss: 2.908031656 step: 8600 | loss: 2.905446738 step: 8610 | loss: 2.902861819 step: 8620 | loss: 2.900276901 step: 8630 | loss: 2.897691983 step: 8640 | loss: 2.895107064 step: 8650 | loss: 2.892522146 step: 8660 | loss: 2.889937227 step: 8670 | loss: 2.887352309 step: 8680 | loss: 2.884767390 step: 8690 | loss: 2.882182472 step: 8700 | loss: 2.879597554 step: 8710 | loss: 2.877012635 step: 8720 | loss: 2.874427717 step: 8730 | loss: 2.871842798 step: 8740 | loss: 2.869257880 step: 8750 | loss: 2.866672961 step: 8760 | loss: 2.864088043 step: 8770 | loss: 2.861503124 step: 8780 | loss: 2.858918206 step: 8790 | loss: 2.856333288 step: 8800 | loss: 2.853748369 step: 8810 | loss: 2.851163451 step: 8820 | loss: 2.848578532 step: 8830 | loss: 2.845993614 step: 8840 | loss: 2.843408695 step: 8850 | loss: 2.840823777 step: 8860 | loss: 2.838238858 step: 8870 | loss: 2.835653940 step: 8880 | loss: 2.833069022 step: 8890 | loss: 2.830484103 step: 8900 | loss: 2.827899185 step: 8910 | loss: 2.825314266 step: 8920 | loss: 2.822729348 step: 8930 | loss: 2.820144429 step: 8940 | loss: 2.817559511 step: 8950 | loss: 2.814974593 step: 8960 | loss: 2.812389674 step: 8970 | loss: 2.809804756 step: 8980 | loss: 2.807219837 step: 8990 | loss: 2.804634919 step: 9000 | loss: 2.802050000 step: 9010 | loss: 2.799465082 step: 9020 | loss: 2.796880163 step: 9030 | loss: 2.794295245 step: 9040 | loss: 2.791710327 step: 9050 | loss: 2.789125408 step: 9060 | loss: 2.786540490 step: 9070 | loss: 2.783955571 step: 9080 | loss: 2.781370653 step: 9090 | loss: 2.778785734 step: 9100 | loss: 2.776200816 step: 9110 | loss: 2.773615897 step: 9120 | loss: 2.771030979 step: 9130 | loss: 2.768446061 step: 9140 | loss: 2.765861142 step: 9150 | loss: 2.763276224 step: 9160 | loss: 2.760691305 step: 9170 | loss: 2.758106387 step: 9180 | loss: 2.755521468 step: 9190 | loss: 2.752936550 step: 9200 | loss: 2.750351632 step: 9210 | loss: 2.747766713 step: 9220 | loss: 2.745181795 step: 9230 | loss: 2.742596876 step: 9240 | loss: 2.740011958 step: 9250 | loss: 2.737427039 step: 9260 | loss: 2.734842121 step: 9270 | loss: 2.732257202 step: 9280 | loss: 2.729672284 step: 9290 | loss: 2.727087366 step: 9300 | loss: 2.724502447 step: 9310 | loss: 2.721917529 step: 9320 | loss: 2.719332610 step: 9330 | loss: 2.716747692 step: 9340 | loss: 2.714162773 step: 9350 | loss: 2.711577855 step: 9360 | loss: 2.708992936 step: 9370 | loss: 2.706408018 step: 9380 | loss: 2.703823100 step: 9390 | loss: 2.701238181 step: 9400 | loss: 2.698653263 step: 
9410 | loss: 2.696068344 step: 9420 | loss: 2.693483426 step: 9430 | loss: 2.690898507 step: 9440 | loss: 2.688313589 step: 9450 | loss: 2.685728671 step: 9460 | loss: 2.683143752 step: 9470 | loss: 2.680558834 step: 9480 | loss: 2.677973915 step: 9490 | loss: 2.675388997 step: 9500 | loss: 2.672804078 step: 9510 | loss: 2.670219160 step: 9520 | loss: 2.667634241 step: 9530 | loss: 2.665049323 step: 9540 | loss: 2.662464405 step: 9550 | loss: 2.659879486 step: 9560 | loss: 2.657294568 step: 9570 | loss: 2.654709649 step: 9580 | loss: 2.652124731 step: 9590 | loss: 2.649539812 step: 9600 | loss: 2.646954894 step: 9610 | loss: 2.644369976 step: 9620 | loss: 2.641785057 step: 9630 | loss: 2.639200139 step: 9640 | loss: 2.636615220 step: 9650 | loss: 2.634030302 step: 9660 | loss: 2.631445383 step: 9670 | loss: 2.628860465 step: 9680 | loss: 2.626275546 step: 9690 | loss: 2.623690628 step: 9700 | loss: 2.621105710 step: 9710 | loss: 2.618520791 step: 9720 | loss: 2.615935873 step: 9730 | loss: 2.613350954 step: 9740 | loss: 2.610766036 step: 9750 | loss: 2.608181117 step: 9760 | loss: 2.605596199 step: 9770 | loss: 2.603011280 step: 9780 | loss: 2.600426362 step: 9790 | loss: 2.597841444 step: 9800 | loss: 2.595256525 step: 9810 | loss: 2.592671607 step: 9820 | loss: 2.590086688 step: 9830 | loss: 2.587501770 step: 9840 | loss: 2.584916851 step: 9850 | loss: 2.582331933 step: 9860 | loss: 2.579747015 step: 9870 | loss: 2.577162096 step: 9880 | loss: 2.574577178 step: 9890 | loss: 2.571992259 step: 9900 | loss: 2.569407341 step: 9910 | loss: 2.566822422 step: 9920 | loss: 2.564237504 step: 9930 | loss: 2.561652585 step: 9940 | loss: 2.559067667 step: 9950 | loss: 2.556482749 step: 9960 | loss: 2.553897830 step: 9970 | loss: 2.551312912 step: 9980 | loss: 2.548727993 step: 9990 | loss: 2.546143075 step: 10000 | loss: 2.543558156 step: 10010 | loss: 2.540973238 step: 10020 | loss: 2.538388319 step: 10030 | loss: 2.535803401 step: 10040 | loss: 2.533218483 step: 10050 | loss: 2.530633564 step: 10060 | loss: 2.528048646 step: 10070 | loss: 2.525463727 step: 10080 | loss: 2.522878809 step: 10090 | loss: 2.520293890 step: 10100 | loss: 2.517708972 step: 10110 | loss: 2.515124054 step: 10120 | loss: 2.512539135 step: 10130 | loss: 2.509954217 step: 10140 | loss: 2.507369298 step: 10150 | loss: 2.504784380 step: 10160 | loss: 2.502199461 step: 10170 | loss: 2.499614543 step: 10180 | loss: 2.497029624 step: 10190 | loss: 2.494444706 step: 10200 | loss: 2.491859788 step: 10210 | loss: 2.489274869 step: 10220 | loss: 2.486689951 step: 10230 | loss: 2.484105032 step: 10240 | loss: 2.481520114 step: 10250 | loss: 2.478935195 step: 10260 | loss: 2.476350277 step: 10270 | loss: 2.473765358 step: 10280 | loss: 2.471180440 step: 10290 | loss: 2.468595522 step: 10300 | loss: 2.466010603 step: 10310 | loss: 2.463425685 step: 10320 | loss: 2.460840766 step: 10330 | loss: 2.458255848 step: 10340 | loss: 2.455670929 step: 10350 | loss: 2.453086011 step: 10360 | loss: 2.450501093 step: 10370 | loss: 2.447916174 step: 10380 | loss: 2.445331256 step: 10390 | loss: 2.442746337 step: 10400 | loss: 2.440161419 step: 10410 | loss: 2.437576500 step: 10420 | loss: 2.434991582 step: 10430 | loss: 2.432406663 step: 10440 | loss: 2.429821745 step: 10450 | loss: 2.427236827 step: 10460 | loss: 2.424651908 step: 10470 | loss: 2.422066990 step: 10480 | loss: 2.419482071 step: 10490 | loss: 2.416897153 step: 10500 | loss: 2.414312234 step: 10510 | loss: 2.411727316 step: 10520 | loss: 2.409142397 step: 10530 | loss: 2.406557479 
step: 10540 | loss: 2.403972561
[... per-step loss output continues every 10 steps, decreasing steadily ...]
step: 19830 | loss: 0.002583330
- final loss: 0.000774
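For context, this output comes from owl-opt's bundled optimisation examples; the next run, adam.exe, presumably drives an Adam-style update loop. Below is a minimal, self-contained OCaml sketch of such a loop on a toy quadratic, printing progress in the same "step | loss" format. It does not use owl-opt's actual API, and every name and constant in it is hypothetical; it is only meant to illustrate where lines like the ones recorded in this log come from.

(* Sketch only: scalar Adam update on f(x) = (x - 3)^2; not owl-opt's API. *)
let () =
  let alpha = 0.01 and beta1 = 0.9 and beta2 = 0.999 and eps = 1e-8 in
  let target = 3.0 in
  let loss x = (x -. target) ** 2.0 in
  let grad x = 2.0 *. (x -. target) in
  let x = ref 0.0 and m = ref 0.0 and v = ref 0.0 in
  for step = 0 to 1000 do
    if step mod 10 = 0 then
      Printf.printf "step: %d | loss: %.9f\n" step (loss !x);
    let g = grad !x in
    (* Exponential moving averages of the gradient and its square. *)
    m := (beta1 *. !m) +. ((1.0 -. beta1) *. g);
    v := (beta2 *. !v) +. ((1.0 -. beta2) *. g *. g);
    let t = float_of_int (step + 1) in
    let m_hat = !m /. (1.0 -. (beta1 ** t)) in  (* bias correction *)
    let v_hat = !v /. (1.0 -. (beta2 ** t)) in
    x := !x -. (alpha *. m_hat /. (sqrt v_hat +. eps))
  done;
  Printf.printf "final loss: %.6f\n" (loss !x)

With these (hypothetical) settings the printed loss shrinks smoothly toward zero, which is the same qualitative shape as the traces below.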
- (cd _build/default/examples/opt && ./adam.exe)
- step: 0 | loss: 5.838398098
[... per-step loss output continues every 10 steps: loss 4.036878685 at step 5000, 2.357491260 at step 10000 ...]
step: 14320 | loss: 1.132498414
step: 14330 | loss: 1.129859204 step: 14340 | loss: 1.127220084 step: 14350 | loss: 1.124581054 step: 14360 | loss: 1.121942112 step: 14370 | loss: 1.119303256 step: 14380 | loss: 1.116664483 step: 14390 | loss: 1.114025793 step: 14400 | loss: 1.111387184 step: 14410 | loss: 1.108748653 step: 14420 | loss: 1.106110201 step: 14430 | loss: 1.103471824 step: 14440 | loss: 1.100833523 step: 14450 | loss: 1.098195294 step: 14460 | loss: 1.095557138 step: 14470 | loss: 1.092919052 step: 14480 | loss: 1.090281035 step: 14490 | loss: 1.087643087 step: 14500 | loss: 1.085005205 step: 14510 | loss: 1.082367389 step: 14520 | loss: 1.079729638 step: 14530 | loss: 1.077091950 step: 14540 | loss: 1.074454324 step: 14550 | loss: 1.071816760 step: 14560 | loss: 1.069179256 step: 14570 | loss: 1.066541811 step: 14580 | loss: 1.063904425 step: 14590 | loss: 1.061267096 step: 14600 | loss: 1.058629824 step: 14610 | loss: 1.055992607 step: 14620 | loss: 1.053355445 step: 14630 | loss: 1.050718337 step: 14640 | loss: 1.048081283 step: 14650 | loss: 1.045444281 step: 14660 | loss: 1.042807330 step: 14670 | loss: 1.040170431 step: 14680 | loss: 1.037533581 step: 14690 | loss: 1.034896781 step: 14700 | loss: 1.032260030 step: 14710 | loss: 1.029623327 step: 14720 | loss: 1.026986672 step: 14730 | loss: 1.024350064 step: 14740 | loss: 1.021713501 step: 14750 | loss: 1.019076985 step: 14760 | loss: 1.016440513 step: 14770 | loss: 1.013804086 step: 14780 | loss: 1.011167703 step: 14790 | loss: 1.008531363 step: 14800 | loss: 1.005895066 step: 14810 | loss: 1.003258812 step: 14820 | loss: 1.000622599 step: 14830 | loss: 0.997986428 step: 14840 | loss: 0.995350297 step: 14850 | loss: 0.992714207 step: 14860 | loss: 0.990078156 step: 14870 | loss: 0.987442146 step: 14880 | loss: 0.984806174 step: 14890 | loss: 0.982170240 step: 14900 | loss: 0.979534345 step: 14910 | loss: 0.976898487 step: 14920 | loss: 0.974262667 step: 14930 | loss: 0.971626883 step: 14940 | loss: 0.968991136 step: 14950 | loss: 0.966355425 step: 14960 | loss: 0.963719749 step: 14970 | loss: 0.961084109 step: 14980 | loss: 0.958448504 step: 14990 | loss: 0.955812933 step: 15000 | loss: 0.953177396 step: 15010 | loss: 0.950541893 step: 15020 | loss: 0.947906424 step: 15030 | loss: 0.945270987 step: 15040 | loss: 0.942635583 step: 15050 | loss: 0.940000212 step: 15060 | loss: 0.937364872 step: 15070 | loss: 0.934729565 step: 15080 | loss: 0.932094289 step: 15090 | loss: 0.929459043 step: 15100 | loss: 0.926823829 step: 15110 | loss: 0.924188645 step: 15120 | loss: 0.921553491 step: 15130 | loss: 0.918918367 step: 15140 | loss: 0.916283272 step: 15150 | loss: 0.913648206 step: 15160 | loss: 0.911013170 step: 15170 | loss: 0.908378162 step: 15180 | loss: 0.905743182 step: 15190 | loss: 0.903108231 step: 15200 | loss: 0.900473307 step: 15210 | loss: 0.897838410 step: 15220 | loss: 0.895203541 step: 15230 | loss: 0.892568699 step: 15240 | loss: 0.889933883 step: 15250 | loss: 0.887299094 step: 15260 | loss: 0.884664330 step: 15270 | loss: 0.882029593 step: 15280 | loss: 0.879394881 step: 15290 | loss: 0.876760195 step: 15300 | loss: 0.874125533 step: 15310 | loss: 0.871490897 step: 15320 | loss: 0.868856285 step: 15330 | loss: 0.866221697 step: 15340 | loss: 0.863587134 step: 15350 | loss: 0.860952594 step: 15360 | loss: 0.858318078 step: 15370 | loss: 0.855683585 step: 15380 | loss: 0.853049115 step: 15390 | loss: 0.850414669 step: 15400 | loss: 0.847780245 step: 15410 | loss: 0.845145843 step: 15420 | loss: 0.842511464 step: 15430 | loss: 0.839877106 
step: 15440 | loss: 0.837242771 step: 15450 | loss: 0.834608457 step: 15460 | loss: 0.831974164 step: 15470 | loss: 0.829339893 step: 15480 | loss: 0.826705642 step: 15490 | loss: 0.824071412 step: 15500 | loss: 0.821437203 step: 15510 | loss: 0.818803014 step: 15520 | loss: 0.816168845 step: 15530 | loss: 0.813534696 step: 15540 | loss: 0.810900566 step: 15550 | loss: 0.808266457 step: 15560 | loss: 0.805632366 step: 15570 | loss: 0.802998295 step: 15580 | loss: 0.800364242 step: 15590 | loss: 0.797730208 step: 15600 | loss: 0.795096193 step: 15610 | loss: 0.792462196 step: 15620 | loss: 0.789828218 step: 15630 | loss: 0.787194257 step: 15640 | loss: 0.784560314 step: 15650 | loss: 0.781926389 step: 15660 | loss: 0.779292481 step: 15670 | loss: 0.776658591 step: 15680 | loss: 0.774024718 step: 15690 | loss: 0.771390861 step: 15700 | loss: 0.768757022 step: 15710 | loss: 0.766123199 step: 15720 | loss: 0.763489393 step: 15730 | loss: 0.760855602 step: 15740 | loss: 0.758221828 step: 15750 | loss: 0.755588070 step: 15760 | loss: 0.752954328 step: 15770 | loss: 0.750320601 step: 15780 | loss: 0.747686890 step: 15790 | loss: 0.745053194 step: 15800 | loss: 0.742419514 step: 15810 | loss: 0.739785848 step: 15820 | loss: 0.737152197 step: 15830 | loss: 0.734518561 step: 15840 | loss: 0.731884940 step: 15850 | loss: 0.729251333 step: 15860 | loss: 0.726617740 step: 15870 | loss: 0.723984162 step: 15880 | loss: 0.721350597 step: 15890 | loss: 0.718717046 step: 15900 | loss: 0.716083509 step: 15910 | loss: 0.713449986 step: 15920 | loss: 0.710816476 step: 15930 | loss: 0.708182980 step: 15940 | loss: 0.705549496 step: 15950 | loss: 0.702916026 step: 15960 | loss: 0.700282569 step: 15970 | loss: 0.697649124 step: 15980 | loss: 0.695015692 step: 15990 | loss: 0.692382273 step: 16000 | loss: 0.689748866 step: 16010 | loss: 0.687115472 step: 16020 | loss: 0.684482089 step: 16030 | loss: 0.681848719 step: 16040 | loss: 0.679215361 step: 16050 | loss: 0.676582014 step: 16060 | loss: 0.673948679 step: 16070 | loss: 0.671315356 step: 16080 | loss: 0.668682044 step: 16090 | loss: 0.666048744 step: 16100 | loss: 0.663415454 step: 16110 | loss: 0.660782176 step: 16120 | loss: 0.658148910 step: 16130 | loss: 0.655515654 step: 16140 | loss: 0.652882408 step: 16150 | loss: 0.650249173 step: 16160 | loss: 0.647615949 step: 16170 | loss: 0.644982735 step: 16180 | loss: 0.642349532 step: 16190 | loss: 0.639716339 step: 16200 | loss: 0.637083156 step: 16210 | loss: 0.634449983 step: 16220 | loss: 0.631816821 step: 16230 | loss: 0.629183668 step: 16240 | loss: 0.626550524 step: 16250 | loss: 0.623917391 step: 16260 | loss: 0.621284267 step: 16270 | loss: 0.618651153 step: 16280 | loss: 0.616018048 step: 16290 | loss: 0.613384952 step: 16300 | loss: 0.610751865 step: 16310 | loss: 0.608118788 step: 16320 | loss: 0.605485720 step: 16330 | loss: 0.602852660 step: 16340 | loss: 0.600219609 step: 16350 | loss: 0.597586568 step: 16360 | loss: 0.594953534 step: 16370 | loss: 0.592320510 step: 16380 | loss: 0.589687494 step: 16390 | loss: 0.587054486 step: 16400 | loss: 0.584421486 step: 16410 | loss: 0.581788495 step: 16420 | loss: 0.579155512 step: 16430 | loss: 0.576522537 step: 16440 | loss: 0.573889570 step: 16450 | loss: 0.571256611 step: 16460 | loss: 0.568623660 step: 16470 | loss: 0.565990717 step: 16480 | loss: 0.563357781 step: 16490 | loss: 0.560724853 step: 16500 | loss: 0.558091932 step: 16510 | loss: 0.555459019 step: 16520 | loss: 0.552826113 step: 16530 | loss: 0.550193215 step: 16540 | loss: 0.547560323 
step: 16550 | loss: 0.544927439 step: 16560 | loss: 0.542294562 step: 16570 | loss: 0.539661692 step: 16580 | loss: 0.537028829 step: 16590 | loss: 0.534395973 step: 16600 | loss: 0.531763124 step: 16610 | loss: 0.529130281 step: 16620 | loss: 0.526497453 step: 16630 | loss: 0.523864617 step: 16640 | loss: 0.521231794 step: 16650 | loss: 0.518598977 step: 16660 | loss: 0.515966167 step: 16670 | loss: 0.513333364 step: 16680 | loss: 0.510700566 step: 16690 | loss: 0.508067775 step: 16700 | loss: 0.505434991 step: 16710 | loss: 0.502802212 step: 16720 | loss: 0.500169439 step: 16730 | loss: 0.497536673 step: 16740 | loss: 0.494903912 step: 16750 | loss: 0.492271157 step: 16760 | loss: 0.489638408 step: 16770 | loss: 0.487005664 step: 16780 | loss: 0.484372927 step: 16790 | loss: 0.481740195 step: 16800 | loss: 0.479107468 step: 16810 | loss: 0.476474747 step: 16820 | loss: 0.473842032 step: 16830 | loss: 0.471209322 step: 16840 | loss: 0.468576617 step: 16850 | loss: 0.465943918 step: 16860 | loss: 0.463311223 step: 16870 | loss: 0.460678534 step: 16880 | loss: 0.458045851 step: 16890 | loss: 0.455413172 step: 16900 | loss: 0.452780498 step: 16910 | loss: 0.450147830 step: 16920 | loss: 0.447515166 step: 16930 | loss: 0.444882507 step: 16940 | loss: 0.442249853 step: 16950 | loss: 0.439617204 step: 16960 | loss: 0.436984559 step: 16970 | loss: 0.434351920 step: 16980 | loss: 0.431719285 step: 16990 | loss: 0.429086654 step: 17000 | loss: 0.426454034 step: 17010 | loss: 0.423821408 step: 17020 | loss: 0.421188790 step: 17030 | loss: 0.418556178 step: 17040 | loss: 0.415923570 step: 17050 | loss: 0.413290966 step: 17060 | loss: 0.410658366 step: 17070 | loss: 0.408025771 step: 17080 | loss: 0.405393180 step: 17090 | loss: 0.402760593 step: 17100 | loss: 0.400128011 step: 17110 | loss: 0.397495432 step: 17120 | loss: 0.394862857 step: 17130 | loss: 0.392230287 step: 17140 | loss: 0.389597720 step: 17150 | loss: 0.386965158 step: 17160 | loss: 0.384332599 step: 17170 | loss: 0.381700044 step: 17180 | loss: 0.379067493 step: 17190 | loss: 0.376434946 step: 17200 | loss: 0.373802402 step: 17210 | loss: 0.371169862 step: 17220 | loss: 0.368537326 step: 17230 | loss: 0.365904793 step: 17240 | loss: 0.363272264 step: 17250 | loss: 0.360639739 step: 17260 | loss: 0.358007217 step: 17270 | loss: 0.355374698 step: 17280 | loss: 0.352742183 step: 17290 | loss: 0.350109672 step: 17300 | loss: 0.347477164 step: 17310 | loss: 0.344844659 step: 17320 | loss: 0.342212157 step: 17330 | loss: 0.339579660 step: 17340 | loss: 0.336947164 step: 17350 | loss: 0.334314672 step: 17360 | loss: 0.331682184 step: 17370 | loss: 0.329049698 step: 17380 | loss: 0.326417216 step: 17390 | loss: 0.323784737 step: 17400 | loss: 0.321152260 step: 17410 | loss: 0.318519787 step: 17420 | loss: 0.315887317 step: 17430 | loss: 0.313254850 step: 17440 | loss: 0.310622386 step: 17450 | loss: 0.307989924 step: 17460 | loss: 0.305357466 step: 17470 | loss: 0.302725010 step: 17480 | loss: 0.300092557 step: 17490 | loss: 0.297460107 step: 17500 | loss: 0.294827660 step: 17510 | loss: 0.292195216 step: 17520 | loss: 0.289562774 step: 17530 | loss: 0.286930335 step: 17540 | loss: 0.284297898 step: 17550 | loss: 0.281665464 step: 17560 | loss: 0.279033033 step: 17570 | loss: 0.276400605 step: 17580 | loss: 0.273768188 step: 17590 | loss: 0.271135757 step: 17600 | loss: 0.268503335 step: 17610 | loss: 0.265870916 step: 17620 | loss: 0.263238499 step: 17630 | loss: 0.260606086 step: 17640 | loss: 0.257973674 step: 17650 | loss: 0.255341266 
step: 17660 | loss: 0.252708859
[loss continues to fall every 10 steps; only selected steps shown below]
step: 18000 | loss: 0.163208286
step: 18200 | loss: 0.110561835
step: 18400 | loss: 0.057915936
step: 18600 | loss: 0.005270710
step: 18610 | loss: 0.002642820
- final loss: 0.000800
-> compiled owl-opt.0.0.1
-> removed owl-opt.0.0.1
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2025-02-24 09:18.13 ---> saved as "24435695003e4cd22a053647e02daa14662ebb7a934c0635d0e0d6eb4b347180"
Job succeeded
2025-02-24 09:18.23: Job succeeded
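Note on the "step | loss" output above: the owl-opt.0.0.1 test run is an iterative optimisation that prints its loss every 10 steps until convergence. The OCaml sketch below is illustrative only; it does not use the owl-opt API, and the objective, learning rate, and step count are assumptions chosen merely to show the kind of loop that produces such a log.

    (* Minimal stand-alone sketch (not the actual owl-opt test program):
       gradient descent on f(x) = (x - 3)^2 with a fixed learning rate,
       reporting the loss every 10 steps in the same format as the log. *)
    let () =
      let lr = 1e-4 in                        (* fixed learning rate (assumed) *)
      let target = 3.0 in                     (* minimiser of f *)
      let x = ref 0.0 in                      (* parameter being optimised *)
      for step = 1 to 18_610 do
        let grad = 2.0 *. (!x -. target) in   (* df/dx *)
        x := !x -. lr *. grad;
        if step mod 10 = 0 then
          Printf.printf "step: %d | loss: %.9f\n" step ((!x -. target) ** 2.0)
      done;
      Printf.printf "- final loss: %f\n" ((!x -. target) ** 2.0)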