<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xml:lang="ru"><front><journal-meta><journal-id journal-id-type="publisher-id">vmait</journal-id><journal-title-group><journal-title xml:lang="ru">Computational Mathematics and Information Technologies</journal-title><trans-title-group xml:lang="en"><trans-title>Computational Mathematics and Information Technologies</trans-title></trans-title-group></journal-title-group><issn pub-type="epub">2587-8999</issn><publisher><publisher-name>Донской государственный технический университет</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="doi">10.23947/2587-8999-2026-10-1-7-20</article-id><article-id custom-type="elpub" pub-id-type="custom">vmait-221</article-id><article-categories><subj-group subj-group-type="heading"><subject>Research Article</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="ru"><subject>ВЫЧИСЛИТЕЛЬНАЯ  МАТЕМАТИКА</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="en"><subject>COMPUTATIONAL  MATHEMATICS</subject></subj-group></article-categories><title-group><article-title>Семантическая сегментация с оценкой неопределённости на основе модели Дирихле и анизотропной регуляризации</article-title><trans-title-group xml:lang="en"><trans-title>Semantic Segmentation with Uncertainty Estimation Based on the Dirichlet Model and Anisotropic Regularization</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-3651-7629</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Щетинин</surname><given-names>Е. Ю.</given-names></name><name name-style="western" xml:lang="en"><surname>Shchetinin</surname><given-names>E. 
Yu.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Евгений Юрьевич Щетинин, доктор физико-математических наук, профессор</p><p>кафедра информационных технологий</p><p>299053; ул. Университетская, 33; Севастополь</p><p>SPIN-code</p></bio><bio xml:lang="en"><p>Evgeny Yu. Shchetinin, Dr. Sci. (Phys.-Math.), Professor</p><p>Department of Information Technologies</p><p>299053; 33, Universitetskaya St.; Sevastopol</p><p>SPIN-code</p></bio><email xlink:type="simple">riviera-molto@mail.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0003-3870-1558</contrib-id><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Шевчук</surname><given-names>А. А.</given-names></name><name name-style="western" xml:lang="en"><surname>Shevchuk</surname><given-names>A. A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Андрей Андреевич Шевчук, аспирант</p><p>кафедра информационных технологий</p><p>299053; ул. Университетская, 33; Севастополь</p></bio><bio xml:lang="en"><p>Andrey A. 
Shevchuk, PhD Student</p><p>Department of Information Technologies</p><p>299053; 33, Universitetskaya St.; Sevastopol</p></bio><email xlink:type="simple">andreiluck11@yandex.ru</email><xref ref-type="aff" rid="aff-1"/></contrib></contrib-group><aff-alternatives id="aff-1"><aff xml:lang="ru">Севастопольский государственный университет<country>Россия</country></aff><aff xml:lang="en">Sevastopol State University<country>Russian Federation</country></aff></aff-alternatives><pub-date pub-type="collection"><year>2026</year></pub-date><pub-date pub-type="epub"><day>02</day><month>04</month><year>2026</year></pub-date><volume>10</volume><issue>1</issue><fpage>7</fpage><lpage>20</lpage><permissions><copyright-statement>Copyright &#x00A9; Щетинин Е.Ю., Шевчук А.А., 2026</copyright-statement><copyright-year>2026</copyright-year><copyright-holder xml:lang="ru">Щетинин Е.Ю., Шевчук А.А.</copyright-holder><copyright-holder xml:lang="en">Shchetinin E.Yu., Shevchuk A.A.</copyright-holder><license license-type="creative-commons-attribution" xlink:href="https://creativecommons.org/licenses/by/4.0/" xlink:type="simple"><license-p>This work is licensed under a Creative Commons Attribution 4.0 License.</license-p></license></permissions><self-uri xlink:href="https://www.cmit-journal.ru/jour/article/view/221">https://www.cmit-journal.ru/jour/article/view/221</self-uri><abstract><sec><title>   Введение</title><p>   Введение. В задачах вычислительной математики вариационные методы минимизации дискретных энергий широко применяются для регуляризации некорректных задач. Однако стандартные дискретные схемы зачастую обладают масштабной несогласованностью: при измельчении сетки (h→0) веса, зависящие от ненормированных скачков функции, вырождаются, что приводит к тривиализации анизотропных свойств предельного оператора. 
В данной работе предложен вычислительный метод, решающий эту проблему за счет параметризации распределения Дирихле и строго обоснованной анизотропной пространственной регуляризации.</p></sec><sec><title>   Материалы и методы</title><p>   Материалы и методы. Математическая модель формулируется как задача оптимизации составного функционала в пространстве сеточных функций. Функционал включает ожидаемую логарифмическую функцию потерь, регуляризацию Кульбака-Лейблера и пространственные регуляризаторы типа взвешенной энергии Дирихле. Для обеспечения структурной состоятельности дискретного оператора edge-aware весовые функции конструируются строго через нормированные конечные разности. Асимптотическое поведение дискретных энергий исследуется с помощью аппарата Γ-сходимости.</p></sec><sec><title>   Результаты исследования</title><p>   Результаты исследования. Главным теоретическим результатом работы является математическое доказательство Γ-сходимости семейства дискретных анизотропных функционалов к нетривиальному непрерывному пределу в топологии L²(Ω). Доказана равнокоэрцитивность дискретных энергий, гарантирующая сходимость последовательности почти минимизаторов к решению непрерывной задачи.</p></sec><sec><title>   Обсуждение</title><p>   Обсуждение. Использование нормированных конечных разностей при построении весов восстанавливает размерную однородность и обеспечивает строгую масштабную инвариантность дискретизации нелокальных операторов.</p></sec><sec><title>   Заключение</title><p>   Заключение. Предложенный метод успешно связывает непрерывные вариационные постановки с дискретными предиктивными моделями, предоставляя теоретически обоснованный и вычислительно эффективный (дополнительные расходы инференса составляют 17–18 %) инструмент с контролируемой погрешностью.</p></sec></abstract><trans-abstract xml:lang="en"><sec><title>   Introduction</title><p>   Introduction. In computational mathematics, variational methods for minimizing discrete energies are widely used for the regularization of ill-posed problems. 
However, standard discrete schemes often suffer from scale inconsistency: upon mesh refinement (h→0), weights depending on unnormalized jumps of the function degenerate, leading to trivialization of the anisotropic properties of the limiting operator. In this paper, a computational method is proposed that solves this problem by parameterizing the Dirichlet distribution and employing rigorously justified anisotropic spatial regularization.</p></sec><sec><title>   Materials and Methods</title><p>   Materials and Methods. The mathematical model is formulated as an optimization problem for a composite functional in the space of grid functions. The functional includes an expected logarithmic loss function, Kullback-Leibler regularization, and spatial regularizers of the weighted Dirichlet energy type. To ensure the structural consistency of the discrete operator, edge-aware weight functions are constructed strictly through normalized finite differences. The asymptotic behavior of the discrete energies is investigated using the apparatus of Γ-convergence.</p></sec><sec><title>   Results</title><p>   Results. The main theoretical result of the work is a mathematical proof of the Γ-convergence of a family of discrete anisotropic functionals to a non-trivial continuous limit in the L²(Ω) topology. The equicoercivity of the discrete energies is proven, guaranteeing the convergence of a sequence of almost minimizers to the solution of the continuous problem.</p></sec><sec><title>   Discussion</title><p>   Discussion. The use of normalized finite differences in constructing the weights restores dimensional homogeneity and ensures strict scale invariance of the discretization of non-local operators.</p></sec><sec><title>   Conclusion</title><p>   Conclusion. 
The proposed method successfully links continuous variational formulations with discrete predictive models, providing a theoretically justified and computationally efficient tool (additional inference costs amount to 17–18 %) with controlled error.</p></sec></trans-abstract><kwd-group xml:lang="ru"><kwd>семантическая сегментация</kwd><kwd>распределение Дирихле</kwd><kwd>оценка неопределённости</kwd><kwd>калибровка вероятностей</kwd><kwd>анизотропная регуляризация</kwd><kwd>энергия Дирихле</kwd><kwd>Γ-сходимость</kwd><kwd>равнокоэрцитивность</kwd><kwd>медицинские изображения</kwd></kwd-group><kwd-group xml:lang="en"><kwd>semantic segmentation</kwd><kwd>Dirichlet distribution</kwd><kwd>uncertainty estimation</kwd><kwd>probability calibration</kwd><kwd>anisotropic regularization</kwd><kwd>Dirichlet energy</kwd><kwd>Γ-convergence</kwd><kwd>equicoercivity</kwd><kwd>medical images</kwd></kwd-group><funding-group xml:lang="ru"><funding-statement>Работа выполнена при поддержке Севастопольского государственного университета (проект № 42−01−09/319/2025−1)</funding-statement></funding-group><funding-group xml:lang="en"><funding-statement>The work was supported by Sevastopol State University (Project No. 42−01−09/319/2025−1)</funding-statement></funding-group></article-meta></front><back><ref-list><title>References</title><ref id="cit1"><label>1</label><citation-alternatives><mixed-citation xml:lang="ru">Begoli E., Bhattacharya T., Kusnezov D. The need for uncertainty quantification in machine-assisted medical decision making. Nat Mach Intell. 2019;1(1):20–23. doi: 10.1038/s42256-018-0004-1</mixed-citation><mixed-citation xml:lang="en">Begoli E., Bhattacharya T., Kusnezov D. The need for uncertainty quantification in machine-assisted medical decision making. Nat Mach Intell. 2019;1(1):20–23. 
doi: 10.1038/s42256-018-0004-1</mixed-citation></citation-alternatives></ref><ref id="cit2"><label>2</label><citation-alternatives><mixed-citation xml:lang="ru">Abdar M., Pourpanah F., Hussain S., et al. A review of uncertainty quantification in deep learning: Techniques, applications and challenges. Inf Fusion. 2021;76:243–297. doi: 10.1016/j.inffus.2021.05.008</mixed-citation><mixed-citation xml:lang="en">Abdar M., Pourpanah F., Hussain S., et al. A review of uncertainty quantification in deep learning: Techniques, applications and challenges. Inf Fusion. 2021;76:243–297. doi: 10.1016/j.inffus.2021.05.008</mixed-citation></citation-alternatives></ref><ref id="cit3"><label>3</label><citation-alternatives><mixed-citation xml:lang="ru">Gal Y., Ghahramani Z. Dropout as a Bayesian approximation: representing model uncertainty in deep learning. In: Proc. ICML. New York: PMLR; 2016. P. 1050–1059.</mixed-citation><mixed-citation xml:lang="en">Gal Y., Ghahramani Z. Dropout as a Bayesian approximation: representing model uncertainty in deep learning. In: Proc. ICML. New York: PMLR; 2016. P. 1050–1059.</mixed-citation></citation-alternatives></ref><ref id="cit4"><label>4</label><citation-alternatives><mixed-citation xml:lang="ru">Lakshminarayanan B., Pritzel A., Blundell C. Simple and scalable predictive uncertainty estimation using deep ensembles. In: Advances in Neural Information Processing Systems. 2017;30:6402–6413.</mixed-citation><mixed-citation xml:lang="en">Lakshminarayanan B., Pritzel A., Blundell C. Simple and scalable predictive uncertainty estimation using deep ensembles. In: Advances in Neural Information Processing Systems. 2017;30:6402–6413.</mixed-citation></citation-alternatives></ref><ref id="cit5"><label>5</label><citation-alternatives><mixed-citation xml:lang="ru">Sensoy M., Kaplan L., Kandemir M. Evidential deep learning to quantify classification uncertainty. In: Advances in Neural Information Processing Systems. 
2018;31:3183–3193.</mixed-citation><mixed-citation xml:lang="en">Sensoy M., Kaplan L., Kandemir M. Evidential deep learning to quantify classification uncertainty. In: Advances in Neural Information Processing Systems. 2018;31:3183–3193.</mixed-citation></citation-alternatives></ref><ref id="cit6"><label>6</label><citation-alternatives><mixed-citation xml:lang="ru">Malinin A., Gales M. Predictive uncertainty estimation via prior networks. In: Advances in Neural Information Processing Systems. 2018;31:7047–7058.</mixed-citation><mixed-citation xml:lang="en">Malinin A., Gales M. Predictive uncertainty estimation via prior networks. In: Advances in Neural Information Processing Systems. 2018;31:7047–7058.</mixed-citation></citation-alternatives></ref><ref id="cit7"><label>7</label><citation-alternatives><mixed-citation xml:lang="ru">Jungo A., Reyes M. Assessing reliability and challenges of uncertainty estimations for medical image segmentation. In: MICCAI 2019. LNCS, vol. 11765. Cham: Springer; 2019. P. 48–56. doi: 10.1007/978-3-030-32245-8_6</mixed-citation><mixed-citation xml:lang="en">Jungo A., Reyes M. Assessing reliability and challenges of uncertainty estimations for medical image segmentation. In: MICCAI 2019. LNCS, vol. 11765. Cham: Springer; 2019. P. 48–56. doi: 10.1007/978-3-030-32245-8_6</mixed-citation></citation-alternatives></ref><ref id="cit8"><label>8</label><citation-alternatives><mixed-citation xml:lang="ru">Nair T., Precup D., Arnold D.L., Arbel T. Exploring uncertainty measures in deep networks for multiple sclerosis lesion detection and segmentation. Med Image Anal. 2020;59:101557. doi: 10.1016/j.media.2019.101557</mixed-citation><mixed-citation xml:lang="en">Nair T., Precup D., Arnold D.L., Arbel T. Exploring uncertainty measures in deep networks for multiple sclerosis lesion detection and segmentation. Med Image Anal. 2020;59:101557. 
doi: 10.1016/j.media.2019.101557</mixed-citation></citation-alternatives></ref><ref id="cit9"><label>9</label><citation-alternatives><mixed-citation xml:lang="ru">Mehrtash A., Wells W.M., Tempany C.M., Abolmaesumi P., Kapur T. Confidence calibration and predictive uncertainty estimation for deep medical image segmentation. IEEE Trans Med Imaging. 2020;39(12):3868–3878. doi: 10.1109/TMI.2020.3006437</mixed-citation><mixed-citation xml:lang="en">Mehrtash A., Wells W.M., Tempany C.M., Abolmaesumi P., Kapur T. Confidence calibration and predictive uncertainty estimation for deep medical image segmentation. IEEE Trans Med Imaging. 2020;39(12):3868–3878. doi: 10.1109/TMI.2020.3006437</mixed-citation></citation-alternatives></ref><ref id="cit10"><label>10</label><citation-alternatives><mixed-citation xml:lang="ru">Li H., Nan Y., Del Ser J., Yang G. Region-based evidential deep learning to quantify uncertainty and improve robustness of brain tumor segmentation. Neural Comput Appl. 2023;35:22071–22085. doi: 10.1007/s00521-022-08016-4</mixed-citation><mixed-citation xml:lang="en">Li H., Nan Y., Del Ser J., Yang G. Region-based evidential deep learning to quantify uncertainty and improve robustness of brain tumor segmentation. Neural Comput Appl. 2023;35:22071–22085. doi: 10.1007/s00521-022-08016-4</mixed-citation></citation-alternatives></ref><ref id="cit11"><label>11</label><citation-alternatives><mixed-citation xml:lang="ru">UDEL: Rethinking uncertainty dynamic estimation learning for ambiguous medical image segmentation. Digit Signal Process. 2025;169:105723. doi: 10.1016/j.dsp.2025.105723</mixed-citation><mixed-citation xml:lang="en">UDEL: Rethinking uncertainty dynamic estimation learning for ambiguous medical image segmentation. Digit Signal Process. 2025;169:105723. doi: 10.1016/j.dsp.2025.105723</mixed-citation></citation-alternatives></ref><ref id="cit12"><label>12</label><citation-alternatives><mixed-citation xml:lang="ru">Yang B., Zhang X., Zhang H., et al. 
Structural uncertainty estimation for medical image segmentation. Med Image Anal. 2025;103:103602. doi: 10.1016/j.media.2025.103602</mixed-citation><mixed-citation xml:lang="en">Yang B., Zhang X., Zhang H., et al. Structural uncertainty estimation for medical image segmentation. Med Image Anal. 2025;103:103602. doi: 10.1016/j.media.2025.103602</mixed-citation></citation-alternatives></ref><ref id="cit13"><label>13</label><citation-alternatives><mixed-citation xml:lang="ru">Han K., Wang S., Chen J., et al., Region uncertainty estimation for medical image segmentation with noisy labels. IEEE Trans Med Imaging. 2025;44(12):5197–5207. doi: 10.1109/TMI.2025.3589058</mixed-citation><mixed-citation xml:lang="en">Han K., Wang S., Chen J., et al., Region uncertainty estimation for medical image segmentation with noisy labels. IEEE Trans Med Imaging. 2025;44(12):5197–5207. doi: 10.1109/TMI.2025.3589058</mixed-citation></citation-alternatives></ref><ref id="cit14"><label>14</label><citation-alternatives><mixed-citation xml:lang="ru">Dal Maso G. An Introduction to Γ-Convergence. Boston: Birkhäuser; 1993. doi: 10.1007/978-1-4612-0327-8</mixed-citation><mixed-citation xml:lang="en">Dal Maso G. An Introduction to Γ-Convergence. Boston: Birkhäuser; 1993. doi: 10.1007/978-1-4612-0327-8</mixed-citation></citation-alternatives></ref><ref id="cit15"><label>15</label><citation-alternatives><mixed-citation xml:lang="ru">Braides A. Γ-Convergence for Beginners. Oxford: Oxford University Press; 2002. doi: 10.1093/acprof:oso/9780198507840.001.0001</mixed-citation><mixed-citation xml:lang="en">Braides A. Γ-Convergence for Beginners. Oxford: Oxford University Press; 2002. doi: 10.1093/acprof:oso/9780198507840.001.0001</mixed-citation></citation-alternatives></ref><ref id="cit16"><label>16</label><citation-alternatives><mixed-citation xml:lang="ru">Ciarlet P.G. The Finite Element Method for Elliptic Problems. Philadelphia: SIAM; 2002. 
doi: 10.1137/1.9780898719208</mixed-citation><mixed-citation xml:lang="en">Ciarlet P.G. The Finite Element Method for Elliptic Problems. Philadelphia: SIAM; 2002. doi: 10.1137/1.9780898719208</mixed-citation></citation-alternatives></ref><ref id="cit17"><label>17</label><citation-alternatives><mixed-citation xml:lang="ru">Ronneberger O., Fischer P., Brox T. U-Net: convolutional networks for biomedical image segmentation. In: MICCAI 2015. LNCS, vol. 9351. Cham: Springer; 2015. P. 234–241. doi: 10.1007/978-3-319-24574-4_28</mixed-citation><mixed-citation xml:lang="en">Ronneberger O., Fischer P., Brox T. U-Net: convolutional networks for biomedical image segmentation. In: MICCAI 2015. LNCS, vol. 9351. Cham: Springer; 2015. P. 234–241. doi: 10.1007/978-3-319-24574-4_28</mixed-citation></citation-alternatives></ref><ref id="cit18"><label>18</label><citation-alternatives><mixed-citation xml:lang="ru">He K., Zhang X., Ren S., Sun J. Deep residual learning for image recognition. In: Proc. CVPR. 2016. P. 770–778. doi: 10.1109/CVPR.2016.90</mixed-citation><mixed-citation xml:lang="en">He K., Zhang X., Ren S., Sun J. Deep residual learning for image recognition. In: Proc. CVPR. 2016. P. 770–778. doi: 10.1109/CVPR.2016.90</mixed-citation></citation-alternatives></ref><ref id="cit19"><label>19</label><citation-alternatives><mixed-citation xml:lang="ru">Kingma D.P., Ba J. Adam: a method for stochastic optimization. In: Proc. ICLR. 2015. arXiv:1412.6980.</mixed-citation><mixed-citation xml:lang="en">Kingma D.P., Ba J. Adam: a method for stochastic optimization. In: Proc. ICLR. 2015. arXiv:1412.6980.</mixed-citation></citation-alternatives></ref><ref id="cit20"><label>20</label><citation-alternatives><mixed-citation xml:lang="ru">Bernard O., Lalande A., Zotti C., et al. Deep learning techniques for automatic MRI cardiac multistructures segmentation and diagnosis. IEEE Trans Med Imaging. 2018;37(11):2514–2525. 
doi: 10.1109/TMI.2018.2837502</mixed-citation><mixed-citation xml:lang="en">Bernard O., Lalande A., Zotti C., et al. Deep learning techniques for automatic MRI cardiac multistructures segmentation and diagnosis. IEEE Trans Med Imaging. 2018;37(11):2514–2525. doi: 10.1109/TMI.2018.2837502</mixed-citation></citation-alternatives></ref><ref id="cit21"><label>21</label><citation-alternatives><mixed-citation xml:lang="ru">Landman B.A., Xu Z., Iglesias J.E., et al. MICCAI Multi-Atlas Labeling Beyond the Cranial Vault ⸺ Workshop and Challenge. doi: 10.7303/syn3193805</mixed-citation><mixed-citation xml:lang="en">Landman B.A., Xu Z., Iglesias J.E., et al. MICCAI Multi-Atlas Labeling Beyond the Cranial Vault ⸺ Workshop and Challenge. doi: 10.7303/syn3193805</mixed-citation></citation-alternatives></ref><ref id="cit22"><label>22</label><citation-alternatives><mixed-citation xml:lang="ru">Kavur A.E., Gezer N.S., Bariş M., et al. CHAOS Challenge ⸺ combined (CT-MR) healthy abdominal organ segmentation. Med Image Anal. 2021;69:101950. doi: 10.1016/j.media.2020.101950</mixed-citation><mixed-citation xml:lang="en">Kavur A.E., Gezer N.S., Bariş M., et al. CHAOS Challenge ⸺ combined (CT-MR) healthy abdominal organ segmentation. Med Image Anal. 2021;69:101950. doi: 10.1016/j.media.2020.101950</mixed-citation></citation-alternatives></ref><ref id="cit23"><label>23</label><citation-alternatives><mixed-citation xml:lang="ru">Geifman Y., El-Yaniv R. Selective classification for deep neural networks. In: Advances in Neural Information Processing Systems. 2017;30:4878–4887.</mixed-citation><mixed-citation xml:lang="en">Geifman Y., El-Yaniv R. Selective classification for deep neural networks. In: Advances in Neural Information Processing Systems. 2017;30:4878–4887.</mixed-citation></citation-alternatives></ref><ref id="cit24"><label>24</label><citation-alternatives><mixed-citation xml:lang="ru">Isensee F., Jaeger P.F., Kohl S.A.A. et al. 
nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nat Methods. 2021;18(2):203–211. doi: 10.1038/s41592-020-01008-z</mixed-citation><mixed-citation xml:lang="en">Isensee F., Jaeger P.F., Kohl S.A.A. et al. nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nat Methods. 2021;18(2):203–211. doi: 10.1038/s41592-020-01008-z</mixed-citation></citation-alternatives></ref><ref id="cit25"><label>25</label><citation-alternatives><mixed-citation xml:lang="ru">Perona P., Malik J. Scale-space and edge detection using anisotropic diffusion. IEEE Trans Pattern Anal Mach Intell. 1990;12(7):629–639. doi: 10.1109/34.56205</mixed-citation><mixed-citation xml:lang="en">Perona P., Malik J. Scale-space and edge detection using anisotropic diffusion. IEEE Trans Pattern Anal Mach Intell. 1990;12(7):629–639. doi: 10.1109/34.56205</mixed-citation></citation-alternatives></ref><ref id="cit26"><label>26</label><citation-alternatives><mixed-citation xml:lang="ru">Бахвалов Н.С., Жидков Н.П., Кобельков Г.М. Численные методы. Москва: БИНОМ; 2012. 636 с.</mixed-citation><mixed-citation xml:lang="en">Bakhvalov N.S., Zhidkov N.P., Kobelkov G.M. Numerical methods. Moscow: BINOM; 2012. 636 p. (In Russ.)</mixed-citation></citation-alternatives></ref><ref id="cit27"><label>27</label><citation-alternatives><mixed-citation xml:lang="ru">Джонсон Н.Л., Котц С., Балакришнан Н. Непрерывные одномерные распределения. Т. 2. Нью-Йорк: Wiley; 1995.</mixed-citation><mixed-citation xml:lang="en">Johnson N.L., Kotz S., Balakrishnan N. Continuous Univariate Distributions. Vol. 2. 2<sup>nd</sup> ed. New York: Wiley; 1995.</mixed-citation></citation-alternatives></ref><ref id="cit28"><label>28</label><citation-alternatives><mixed-citation xml:lang="ru">Тихонов А.Н., Арсенин В.Я. Методы решения некорректных задач. Москва: Наука; 1979. 288 с.</mixed-citation><mixed-citation xml:lang="en">Tikhonov A.N., Arsenin V.Ya. 
Methods for solving ill-posed problems. Moscow: Nauka; 1979. 288 p. (In Russ.)</mixed-citation></citation-alternatives></ref><ref id="cit29"><label>29</label><citation-alternatives><mixed-citation xml:lang="ru">Самарский А.А. Теория разностных схем. Москва: Наука; 1989. 616 с.</mixed-citation><mixed-citation xml:lang="en">Samarsky A.A. The theory of difference schemes. Moscow: Nauka; 1989. 616 p. (In Russ.)</mixed-citation></citation-alternatives></ref><ref id="cit30"><label>30</label><citation-alternatives><mixed-citation xml:lang="ru">Chen T., Xu B., Zhang C., Guestrin C. Training deep nets with sublinear memory cost. arXiv:1604.06174. 2016.</mixed-citation><mixed-citation xml:lang="en">Chen T., Xu B., Zhang C., Guestrin C. Training deep nets with sublinear memory cost. arXiv:1604.06174. 2016.</mixed-citation></citation-alternatives></ref><ref id="cit31"><label>31</label><citation-alternatives><mixed-citation xml:lang="ru">Micikevicius P., Narang S., Alben J. et al. Mixed precision training. In: Proc. ICLR. 2018. arXiv:1710.03740.</mixed-citation><mixed-citation xml:lang="en">Micikevicius P., Narang S., Alben J. et al. Mixed precision training. In: Proc. ICLR. 2018. arXiv:1710.03740.</mixed-citation></citation-alternatives></ref></ref-list><fn-group><fn fn-type="conflict"><p>The authors declare that there are no conflicts of interest present.</p></fn></fn-group></back></article>
