From a47456296ee14be6217d66f2e90eaecbc3a89f02 Mon Sep 17 00:00:00 2001 From: Dave Welsch Date: Tue, 5 Nov 2024 15:50:05 -0800 Subject: [PATCH 1/3] Set up folder structure and stubs for beta doc site. Signed-off-by: Dave Welsch --- Docs/beta/_static/style.css | 1365 ++++++++++++++++++---- Docs/beta/api/index.rst | 5 + Docs/beta/conf.py | 49 +- Docs/beta/examples/index.rst | 5 + Docs/beta/index.rst | 71 +- Docs/beta/install/index.rst | 5 + Docs/beta/install/quick-start.rst | 5 + Docs/beta/rn/index.rst | 5 + Docs/beta/ug/features/adaround.rst | 102 ++ Docs/beta/ug/features/autoquant.rst | 47 + Docs/beta/ug/features/bn.rst | 47 + Docs/beta/ug/features/cle.rst | 47 + Docs/beta/ug/features/cp.rst | 47 + Docs/beta/ug/features/index.rst | 141 +++ Docs/beta/ug/features/qat.rst | 47 + Docs/beta/ug/features/quant_analyzer.rst | 47 + Docs/beta/ug/features/quant_sim.rst | 47 + Docs/beta/ug/features/spatial_svd.rst | 47 + Docs/beta/ug/features/visualization.rst | 47 + Docs/beta/ug/features/weight_svd.rst | 47 + Docs/beta/ug/index.rst | 17 + Docs/beta/ug/optimization/index.rst | 3 + 22 files changed, 1996 insertions(+), 247 deletions(-) create mode 100644 Docs/beta/api/index.rst create mode 100644 Docs/beta/examples/index.rst create mode 100644 Docs/beta/install/index.rst create mode 100644 Docs/beta/install/quick-start.rst create mode 100644 Docs/beta/rn/index.rst create mode 100644 Docs/beta/ug/features/adaround.rst create mode 100644 Docs/beta/ug/features/autoquant.rst create mode 100644 Docs/beta/ug/features/bn.rst create mode 100644 Docs/beta/ug/features/cle.rst create mode 100644 Docs/beta/ug/features/cp.rst create mode 100644 Docs/beta/ug/features/index.rst create mode 100644 Docs/beta/ug/features/qat.rst create mode 100644 Docs/beta/ug/features/quant_analyzer.rst create mode 100644 Docs/beta/ug/features/quant_sim.rst create mode 100644 Docs/beta/ug/features/spatial_svd.rst create mode 100644 Docs/beta/ug/features/visualization.rst create mode 100644 
Docs/beta/ug/features/weight_svd.rst create mode 100644 Docs/beta/ug/index.rst create mode 100644 Docs/beta/ug/optimization/index.rst diff --git a/Docs/beta/_static/style.css b/Docs/beta/_static/style.css index dd30c3fa814..0df77588fe1 100644 --- a/Docs/beta/_static/style.css +++ b/Docs/beta/_static/style.css @@ -1,283 +1,1194 @@ -.rst-content .hideitem { - display: none; +@font-face { + font-family: Roboto; + font-style: normal; + font-weight: 400; + src: local("Roboto"), local("Roboto-Regular"), url(fonts/roboto/roboto.woff2) format("woff2"); } - -nav .hideitem { - display: unset; - font-size: 13px; +@font-face { + font-family: Roboto; + font-style: italic; + font-weight: 400; + src: local("Roboto Italic"), local("Roboto-Italic"), url(fonts/roboto/roboto-italic.woff2) format("woff2"); } - -.hideitem { - font-size: 14px; +@font-face { + font-family: Roboto; + font-style: normal; + font-weight: 700; + src: local("Roboto Bold"), local("Roboto-Bold"), url(fonts/roboto/roboto-bold.woff2) format("woff2"); } - -.rst-content .code-block-caption .headerlink, .rst-content .eqno .headerlink, .rst-content .toctree-wrapper > p.caption .headerlink, .rst-content dl dt .headerlink, .rst-content h1 .headerlink, .rst-content h2 .headerlink, .rst-content h3 .headerlink, .rst-content h4 .headerlink, .rst-content h5 .headerlink, .rst-content h6 .headerlink, .rst-content p.caption .headerlink, .rst-content p .headerlink, .rst-content table > caption .headerlink { - opacity: 0; - font-size: 14px; - font-family: FontAwesome; - margin-left: -8.5em; - position: absolute; - margin-top: -55px; +@font-face { + font-family: Roboto Mono; + font-style: normal; + font-weight: 400; + src: local("Roboto Mono Regular"), local("RobotoMono-Regular"), url(fonts/roboto-mono/roboto-mono.woff2) format("woff2"); } - - +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 400; + src: local("Roboto Mono Italic"), local("RobotoMono-Italic"), 
url(fonts/roboto-mono/roboto-mono-italic.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: normal; + font-weight: 700; + src: local("Roboto Mono Bold"), local("RobotoMono-Bold"), url(fonts/roboto-mono/roboto-mono-bold.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 700; + src: local("Roboto Mono Bold Italic"), local("RobotoMono-BoldItalic"), url(fonts/roboto-mono/roboto-mono-bold-italic.woff2) format("woff2"); +} +/*****************************************************************************/ +/* Typography */ :root { - --aimet-blue: #3253dc; - --aimet-dark-blue: #0000ff; - --aimet-white: #ffffff; - --aimet-border-grey: #e0e0e0; - --aimet-menu-hover: #e3efff; - --aimet-menu-font-active: #0058ff; - --aimet-code-grey: #fafafa; - --aimet-light-blue: #e7f2fa; + --codeBackgroundColor: #f8f8f8; + --inlineCodeBackgroundColor: #f8f8f8; + --codeBlue: #0000ff; + --codeGreen: #008000; + --dividerColor: rgba(0, 0, 0, 0.08); + --faintFontColor: rgba(0, 0, 0, 0.6); + --fontColor: #252630; + --linkColor: #2980b9; + --mainBackgroundColor: white; + --mainNavColor: #3889ce; + --notificationBannerColor: #176bb0; + --searchHighlightColor: #fff150; + --sidebarColor: white; + --navbarHeight: 4rem; } - -/* In main body this sets the background used in the text boxes*/ -div.document { - background-color: var(--aimet-white); +:root[data-mode=darkest] { + --mainBackgroundColor: black; + --sidebarColor: black; + --codeBackgroundColor: rgba(255, 255, 255, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.1); +} +:root[data-mode=dark] { + --mainBackgroundColor: #242429; + --sidebarColor: #242429; + --codeBackgroundColor: rgba(0, 0, 0, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.06); +} +:root[data-mode=dark], :root[data-mode=darkest] { + --codeBlue: #77baff; + --codeGreen: #38c038; + --dividerColor: rgba(255, 255, 255, 0.1); + --faintFontColor: rgba(255, 255, 255, 0.6); + 
--fontColor: white; + --linkColor: #319be0; + --searchHighlightColor: #fe8e04; } -.wy-side-nav-search > div.version { -margin-top: -.4045em; -margin-bottom: .809em; -font-weight: 400; -color: var(--aimet-dark-blue); +body { + font-family: Roboto, "OpenSans", sans-serif; + background-color: var(--mainBackgroundColor); + color: var(--fontColor); } -/* In body this sets the background used on the left and right side of the main content box*/ -.wy-body-for-nav { - background: var(--aimet-white); - background-color: var(--aimet-white); - background-position-x: 0%; - background-position-y: 0%; - background-repeat: repeat; - background-attachment: scroll; - background-image: none; - background-size: auto; - background-origin: padding-box; - background-clip: border-box; +h1 { + font-size: 2rem; } -/* position of main body */ -.wy-grid-for-nav { - position: absolute; - width: 95%; - height: 100%; - display: grid; - justify-content: left; +h2 { + font-size: 1.5rem; } -/* boarder around main content */ -.wy-nav-content-wrap { - margin-left: 300px; - background: var(--aimet-white); - min-height: auto; - border-left-width: 2px; - border-left-style: solid; - border-left-color: var(--aimet-border-grey); - border-right-width: 2px; - border-right-style: solid; - border-right-color: var(--aimet-border-grey); - /* position: fixed; */ +h3 { + font-size: 1.17rem; } -/* Left Menu */ -.wy-nav-side { - position: fixed; - top: 0; - bottom: 0; - left: 0; - padding-bottom: 2em; - width: 300px; - overflow-x: hidden; - overflow-y: hidden; - min-height: 100%; - color: var(--aimet-white); - background: var(--aimet-white); - z-index: 200; -} -.wy-side-nav-search { - display: block; - width: 300px; - padding: .809em; - margin-bottom: .809em; - z-index: 200; - background-color: var(--aimet-white); - text-align: center; - color: var(--aimet-blue); +a { + color: var(--linkColor); + text-decoration: none; } -.wy-nav-content { - padding: 1.618em 3.236em; - height: 100%; - max-width: 1400px; - margin: 
auto; - background: #ffffff; +/*****************************************************************************/ +html { + height: 100%; + scroll-padding-top: var(--navbarHeight); } -.wy-side-nav-search .wy-dropdown > a, .wy-side-nav-search > a { - color: blue; - font-size: 100%; - font-weight: 700; - display: inline-block; - padding: 4px 6px; - margin-bottom: .809em; - max-width: 100%; +html, +body { + padding: 0; + margin: 0; + min-height: 100%; } -.wy-menu-vertical a:hover button.toctree-expand { - color: black; /* the button color when hover over */ +body { + display: flex; + flex-direction: column; } -.wy-menu-vertical a:hover { +/*****************************************************************************/ +/* Top nav */ +#searchbox h3#searchlabel { + display: none; +} +#searchbox form.search { + display: flex; + flex-direction: row; +} +#searchbox form.search input { + display: block; + box-sizing: border-box; + padding: 0.3rem; + color: rgba(0, 0, 0, 0.7); + border-radius: 0.2rem; +} +#searchbox form.search input[type=text] { + border: none; + background-color: rgba(255, 255, 255, 0.6); + flex-grow: 1; + margin-right: 0.2rem; +} +#searchbox form.search input[type=text]::placeholder { + color: rgba(0, 0, 0, 0.6); +} +#searchbox form.search input[type=submit] { cursor: pointer; - background: var(--aimet-menu-hover); /* color of toctree menu when hovered over */ + color: var(--mainNavColor); + flex-grow: 0; + border: none; + background-color: white; } -/*.wy-menu-vertical ul:hover { - color: #3253dc; -}*/ - -/* set the color behind all toc tree menus */ -.wy-menu-vertical li { - background: var(--aimet-white); - background-color: rgb(255, 255, 255); - background-position-x: 0%; - background-position-y: 0%; - background-repeat: repeat; - background-attachment: scroll; - background-image: none; - background-size: auto; - background-origin: padding-box; - background-clip: border-box; +div#top_nav { + position: fixed; + top: 0; + left: 0; + right: 0; + color: white; 
+ z-index: 100; } - -/* -.wy-menu-vertical li.current { - background: var(--aimet-menu-hover); -} -*/ -.wy-menu-vertical a { - line-height: 18px; - padding: .4045em 1.618em; +div#top_nav div#notification_banner { + background-color: var(--notificationBannerColor); + box-sizing: border-box; + padding: 0.1rem 1rem; + display: flex; + flex-direction: row; + align-items: center; + justify-content: right; +} +div#top_nav div#notification_banner a.close { + flex-grow: 0; + flex-shrink: 0; + color: rgba(255, 255, 255, 0.85); + text-align: right; + font-size: 0.6rem; + text-transform: uppercase; display: block; - position: relative; - font-size: 90%; - /*color: #e8f3f7; /* toctree color - color: #3253dc */ - color: var(--aimet-blue); - + text-decoration: none; + margin-left: 0.5rem; } -.wy-menu-vertical li.current { - background: var(--aimet-white); /* highlist color behind toctree header when current */ +div#top_nav div#notification_banner a.close:hover { + color: white; } - -.wy-menu-vertical li.current > a { - color: var(--aimet-menu-font-active); /* font color when current*/ - font-weight: 700; - position: relative; - background: var(--aimet-menu-hover); /* back ground of the toctree header when current*/ - border: none; - border-top-color: currentcolor; - border-top-style: none; - border-top-width: medium; - border-bottom-color: currentcolor; - border-bottom-style: none; - border-bottom-width: medium; -padding: .4045em 1.618em; -} - -/* menu item color -.wy-menu-vertical li.current a:hover { - background: var(--aimet-menu-hover); +div#top_nav div#notification_banner p { + flex-grow: 1; + margin: 0; + text-align: center; + font-size: 0.9rem; + line-height: 1.2; + padding: 0.4rem 0; } -*/ - -.wy-menu-vertical li.toctree-l1.current > a, .wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a { - background: var(--aimet-white); +div#top_nav div#notification_banner p a { + color: white; + text-decoration: underline; } - -.wy-menu-vertical li.toctree-l1.current > a, 
.wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav { + background-color: var(--mainNavColor); + box-sizing: border-box; + padding: 1rem; + display: flex; + flex-direction: row; + align-items: center; } - -.wy-menu-vertical li.toctree-l2.current > a, .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a { - background: var(--aimet-white); +div#top_nav nav h1 { + flex-grow: 1; + font-size: 1.2rem; + margin: 0; + padding: 0 0 0 0.8rem; + line-height: 1; } - -.wy-menu-vertical li.toctree-l2.current > a, .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav h1 a { + color: white; } - -.wy-menu-vertical li.toctree-l3.current > a, .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a { - background: var(--aimet-white); +div#top_nav nav h1 img { + height: 1.3rem; + width: auto; } - -.wy-menu-vertical li.toctree-l3.current > a, .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav p#toggle_sidebar { + transform: rotate(90deg); + letter-spacing: 0.1rem; + flex-grow: 0; + margin: 0; + padding: 0; } - -.wy-menu-vertical li.toctree-l3, .wy-menu-vertical li.toctree-l4 { - font-size: 14px; +div#top_nav nav p#toggle_sidebar a { + color: white; + font-weight: bold; } - -/* - * Highlight color for code segments - */ -.highlight, .literal-block -{ - background: var(--aimet-code-grey); +div#top_nav nav a#mode_toggle, div#top_nav nav a#source_link { + margin-right: 1rem; + display: block; + flex-grow: 0; +} +div#top_nav nav a#mode_toggle svg, div#top_nav nav a#source_link svg { + height: 1.3rem; + width: 1.3rem; + vertical-align: middle; +} +div#top_nav nav p.mobile_search_link { + margin: 0; +} +@media (min-width: 50rem) { + div#top_nav nav p.mobile_search_link { + display: none; + } +} +div#top_nav nav p.mobile_search_link a { + color: white; +} +div#top_nav nav 
p.mobile_search_link a svg { + height: 1rem; + vertical-align: middle; +} +@media (max-width: 50rem) { + div#top_nav nav div.searchbox_wrapper { + display: none; + } +} +div#top_nav nav div.searchbox_wrapper #searchbox { + align-items: center; + display: flex !important; + flex-direction: row-reverse; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link { + margin: 0 0.5rem 0 0; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a { + color: rgba(255, 255, 255, 0.8); + font-size: 0.8em; + padding-right: 0.5rem; + text-decoration: underline; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a:hover { + color: white; } -/* - * Table options -*/ - -/* Use root font size for generated table */ -html.writer-html5 .rst-content table.docutils td>p, html.writer-html5 .rst-content table.docutils th>p { - border: 0 solid var(--aimet-border-grey); - line-height: 1.5rem; - font-size: 1rem; +/*****************************************************************************/ +/* Main content */ +div.document { + flex-grow: 1; + margin-top: 2rem; + margin-bottom: 5rem; + margin-left: 15rem; + margin-right: 15rem; + padding-top: var(--navbarHeight); + /***************************************************************************/ + /***************************************************************************/ +} +@media (max-width: 50rem) { + div.document { + margin-left: 0px; + margin-right: 0px; + } +} +div.document section, +div.document div.section { + margin: 4rem 0; +} +div.document section:first-child, +div.document div.section:first-child { + margin-top: 0; +} +div.document section > section, +div.document div.section > div.section { + margin: 4rem 0; +} +div.document section > section > section, +div.document div.section > div.section > div.section { + margin: 2rem 0 0 0; +} +div.document section > section > section > section, +div.document div.section > div.section > div.section > div.section { + margin: 1.5rem 0 0 0; +} 
+div.document h1 + section, +div.document h1 + div.section { + margin-top: 2.5rem !important; +} +div.document h2 + section, +div.document h2 + div.section { + margin-top: 1.5rem !important; +} +div.document img { + max-width: 100%; +} +div.document code { + padding: 2px 4px; + background-color: var(--inlineCodeBackgroundColor); + border-radius: 0.2rem; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9em; +} +div.document div.documentwrapper { + max-width: 45rem; + margin: 0 auto; + flex-grow: 1; + box-sizing: border-box; + padding: 1rem; +} +div.document div.highlight { + color: #252630; + box-sizing: border-box; + padding: 0.2rem 1rem; + margin: 0.5rem 0; + border-radius: 0.2rem; + font-size: 0.9rem; +} +div.document div.highlight pre { + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; +} +div.document div[class*=highlight] { + overflow-x: auto; +} +div.document a.headerlink { + font-size: 0.6em; + display: none; + padding-left: 0.5rem; + vertical-align: middle; +} +div.document h1, +div.document h2, +div.document h3, +div.document h4, +div.document h5, +div.document h6, +div.document str, +div.document b { + font-weight: 700; +} +div.document h1 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h2 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h3, div.document h4 { + margin: 1rem 0 0.5rem 0; +} +div.document h1:hover a.headerlink, +div.document h2:hover a.headerlink, +div.document h3:hover a.headerlink, +div.document h4:hover a.headerlink { + display: inline-block; +} +div.document p, +div.document li { + font-size: 1rem; + line-height: 1.5; +} +div.document li p { + margin: 0 0 0.5rem 0; +} +div.document ul, div.document ol { + padding-left: 2rem; +} +div.document ol.loweralpha { + list-style: lower-alpha; +} +div.document ol.arabic { + list-style: decimal; +} +div.document ol.lowerroman { + list-style: lower-roman; +} +div.document ol.upperalpha { + list-style: upper-alpha; +} +div.document 
ol.upperroman { + list-style: upper-roman; +} +div.document dd { + margin-left: 1.5rem; +} +div.document hr { + border: none; + height: 1px; + background-color: var(--dividerColor); + margin: 2rem 0; +} +div.document table.docutils { + border-collapse: collapse; +} +div.document table.docutils th, div.document table.docutils td { + border: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: 0.5rem 1rem; +} +div.document table.docutils th p, div.document table.docutils th ul, div.document table.docutils td p, div.document table.docutils td ul { + margin: 0.3rem 0; +} +div.document table.docutils th ul, div.document table.docutils td ul { + padding-left: 1rem; +} +div.document form input { + padding: 0.5rem; +} +div.document form input[type=submit], div.document form button { + border: none; + background-color: var(--mainNavColor); + color: white; + padding: 0.5rem 1rem; + border-radius: 0.2rem; +} +div.document span.highlighted { + background-color: var(--searchHighlightColor); + padding: 0 0.1em; +} +div.document div#search-results { + padding-top: 2rem; +} +div.document div#search-results p.search-summary { + font-size: 0.8em; +} +div.document div#search-results ul.search { + list-style: none; + padding-left: 0; +} +div.document div#search-results ul.search li { + border-bottom: 1px solid var(--dividerColor); + margin: 0; + padding: 2rem 0; +} +div.document div#search-results ul.search li > a:first-child { + font-size: 1.2rem; +} +div.document dd ul, div.document dd ol { + padding-left: 1rem; +} +div.document dl.py { + margin-bottom: 2rem; +} +div.document dl.py dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; +} +div.document dl.py em.property { + color: var(--sidebarColor); + font-weight: bold; +} 
+div.document dl.py span.sig-name { + color: var(--codeBlue); + font-weight: bold; +} +div.document dl.py em.property { + color: var(--codeGreen); +} +div.document dl.py em.sig-param { + margin-left: 2rem; +} +div.document dl.py em.sig-param span.default_value { + color: var(--codeGreen); +} +div.document dl.py span.sig-return span.sig-return-typehint { + color: var(--fontColor); +} +div.document dl.py span.sig-return span.sig-return-typehint pre { + color: var(--fontColor); +} +div.document dl.py em.sig-param > span:first-child { + font-weight: bold; +} +div.document dl.cpp, div.document dl.c { + margin-bottom: 1rem; +} +div.document dl.cpp dt.sig, div.document dl.c dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; + line-height: 1.4; +} +div.document dl.cpp span.sig-name, div.document dl.c span.sig-name { + color: var(--codeBlue); + font-weight: bold; +} +div.document dl.cpp span.sig-indent, div.document dl.c span.sig-indent { + margin-left: 2rem; +} +div.document dl.cpp span.target + span, div.document dl.c span.target + span { + color: var(--codeGreen); +} +div.document dl.cpp span.sig-param > span:first-child, div.document dl.c span.sig-param > span:first-child { + font-weight: bold; +} +div.document div.admonition { + box-shadow: 0px 0px 0px 1px var(--dividerColor); + border-radius: 0.2rem; + margin: 1rem 0; + overflow: hidden; +} +div.document div.admonition p { + box-sizing: border-box; + font-size: 0.9rem; + padding: 0.5rem; + margin: 0; +} +div.document div.admonition p:first-child { + padding-bottom: 0; + margin-bottom: 0; +} +div.document div.admonition p + p { + padding-top: 0.2rem; +} +div.document div.admonition p.admonition-title { + font-weight: bolder; + letter-spacing: 0.01rem; +} +div.document 
div.admonition.hint, div.document div.admonition.important, div.document div.admonition.tip { + border-left: 5px solid #56b79c; +} +div.document div.admonition.hint p.admonition-title, div.document div.admonition.important p.admonition-title, div.document div.admonition.tip p.admonition-title { + color: #56b79c; +} +div.document div.admonition.note { + border-left: 5px solid #587f9f; +} +div.document div.admonition.note p.admonition-title { + color: #587f9f; +} +div.document div.admonition.danger, div.document div.admonition.error { + border-left: 5px solid #e6a39a; +} +div.document div.admonition.danger p.admonition-title, div.document div.admonition.error p.admonition-title { + color: #e6a39a; +} +div.document div.admonition.attention, div.document div.admonition.caution, div.document div.admonition.warning { + border-left: 5px solid #e7b486; +} +div.document div.admonition.attention p.admonition-title, div.document div.admonition.caution p.admonition-title, div.document div.admonition.warning p.admonition-title { + color: #e7b486; } -/* Remove internal bottom borders */ -.rst-content table.docutils td, .wy-table-bordered-all td { - border-left-width: 1px; - border-bottom-width: 0; +/*****************************************************************************/ +/* Sidebar */ +div.sphinxsidebar { + background-color: var(--sidebarColor); + border-right: 1px solid var(--dividerColor); + position: fixed; + left: 0; + top: 0; + bottom: 0; + width: 15rem; + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + z-index: 50; +} +@media (max-width: 50rem) { + div.sphinxsidebar { + display: none; + } +} +div.sphinxsidebar div.sphinxsidebarwrapper { + height: 100%; + overflow-y: auto; +} +div.sphinxsidebar ul { + padding-left: 0rem; + list-style: none; +} +div.sphinxsidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div.sphinxsidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + 
text-decoration: none; +} +div.sphinxsidebar ul li a.current { + color: var(--linkColor); +} +div.sphinxsidebar ul li a:hover { + color: var(--linkColor); +} +div.sphinxsidebar ul li > ul { + padding-left: 1rem; +} +div.sphinxsidebar p { + color: var(--faintFontColor); } -/* Keep outside borders */ -.rst-content table.docutils, .wy-table-bordered-all { - border: 1px solid var(--aimet-border-grey); +/*****************************************************************************/ +/* The right sidebar, showing the table of contents for the current page. */ +div#show_right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 20; + background-color: var(--sidebarColor); + border-left: 1px solid var(--dividerColor); + border-bottom: 1px solid var(--dividerColor); + padding: var(--navbarHeight) 1rem 0rem; +} +div#show_right_sidebar p { + font-size: 0.9em; +} +div#show_right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#show_right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; } -/* Set text color of side navigation TOC section headers */ -.wy-menu-vertical header, .wy-menu-vertical p.caption { - color: var(--aimet-dark-blue); +div#right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 50; + background-color: var(--sidebarColor); + width: 15rem; + border-left: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + height: 100%; + overflow-y: auto; +} +div#right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; +} +div#right_sidebar ul { + padding-left: 0rem; + list-style: none; +} +div#right_sidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div#right_sidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + 
text-decoration: none; +} +div#right_sidebar ul li a.current { + color: var(--linkColor); +} +div#right_sidebar ul li a:hover { + color: var(--linkColor); +} +div#right_sidebar ul li > ul { + padding-left: 1rem; +} +div#right_sidebar p { + color: var(--faintFontColor); +} +@media (max-width: 50rem) { + div#right_sidebar { + display: none; + } } +/*****************************************************************************/ +/* Footer */ +div.footer { + box-sizing: border-box; + padding-top: 2rem; + font-size: 0.7rem; + text-align: center; + text-transform: uppercase; + color: var(--faintFontColor); +} -/* - * Configure the appearance of code output box. - * Values are based on sphinx-gallery configuration -*/ -.script-output { - color: black; - display: flex; - gap: 0.5em; +p#theme_credit { + font-size: 0.6rem; + text-transform: uppercase; + text-align: center; + color: var(--faintFontColor); } -.script-output::before { - content: "Out:"; - line-height: 1.4; - padding-top: 10px; +/*****************************************************************************/ +/* Buttons */ +div.button_nav_wrapper { + margin-left: 15rem; + margin-right: 15rem; +} +@media (max-width: 50rem) { + div.button_nav_wrapper { + margin-left: 0px; + margin-right: 0px; + } +} +div.button_nav_wrapper div.button_nav { + max-width: 45rem; + margin: 0 auto; + display: flex; + flex-direction: row; + width: 100%; +} +div.button_nav_wrapper div.button_nav div { + box-sizing: border-box; + padding: 1rem; + flex: 50%; +} +div.button_nav_wrapper div.button_nav div a { + display: block; +} +div.button_nav_wrapper div.button_nav div a span { + vertical-align: middle; +} +div.button_nav_wrapper div.button_nav div a span.icon { + font-weight: bold; + font-size: 0.8em; +} +div.button_nav_wrapper div.button_nav div.left a { + text-align: left; +} +div.button_nav_wrapper div.button_nav div.left a span.icon { + padding-right: 0.4rem; +} +div.button_nav_wrapper div.button_nav div.right a { + text-align: right; 
+} +div.button_nav_wrapper div.button_nav div.right a span.icon { + padding-left: 0.4rem; } -.script-output .highlight { - background: transparent; - flex-grow: 1; - overflow: auto; - /* Allow output block to take up maximum 25 lines */ - max-height: 25em; +/*****************************************************************************/ +/* Pygments overrides in dark mode */ +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight { + --black: #000000; + --red: #ff9393; + --darkBlue: #6b83fe; + --grey: #a8a8a8; + --pink: #ff99d8; + --torquoise: #68e9e9; + --brown: #d48a00; + --purple: #ce04e9; + --paleYellow: #454534; + background: var(--codeBackgroundColor); + color: var(--fontColor); + /* Comment */ + /* Error */ + /* Keyword */ + /* Operator */ + /* Comment.Hashbang */ + /* Comment.Multiline */ + /* Comment.Preproc */ + /* Comment.PreprocFile */ + /* Comment.Single */ + /* Comment.Special */ + /* Generic.Deleted */ + /* Generic.Emph */ + /* Generic.Error */ + /* Generic.Heading */ + /* Generic.Inserted */ + /* Generic.Output */ + /* Generic.Prompt */ + /* Generic.Strong */ + /* Generic.Subheading */ + /* Generic.Traceback */ + /* Keyword.Constant */ + /* Keyword.Declaration */ + /* Keyword.Namespace */ + /* Keyword.Pseudo */ + /* Keyword.Reserved */ + /* Keyword.Type */ + /* Literal.Number */ + /* Literal.String */ + /* Name.Attribute */ + /* Name.Builtin */ + /* Name.Class */ + /* Name.Constant */ + /* Name.Decorator */ + /* Name.Entity */ + /* Name.Exception */ + /* Name.Function */ + /* Name.Label */ + /* Name.Namespace */ + /* Name.Tag */ + /* Name.Variable */ + /* Operator.Word */ + /* Text.Whitespace */ + /* Literal.Number.Bin */ + /* Literal.Number.Float */ + /* Literal.Number.Hex */ + /* Literal.Number.Integer */ + /* Literal.Number.Oct */ + /* Literal.String.Affix */ + /* Literal.String.Backtick */ + /* Literal.String.Char */ + /* Literal.String.Delimiter */ + /* 
Literal.String.Doc */ + /* Literal.String.Double */ + /* Literal.String.Escape */ + /* Literal.String.Heredoc */ + /* Literal.String.Interpol */ + /* Literal.String.Other */ + /* Literal.String.Regex */ + /* Literal.String.Single */ + /* Literal.String.Symbol */ + /* Name.Builtin.Pseudo */ + /* Name.Function.Magic */ + /* Name.Variable.Class */ + /* Name.Variable.Global */ + /* Name.Variable.Instance */ + /* Name.Variable.Magic */ + /* Literal.Number.Integer.Long */ +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight pre, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight pre { + line-height: 125%; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal { + color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos { + color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .hll, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .hll { + background-color: 
var(--paleYellow); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .err, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .err { + border: 1px solid var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .k, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .k { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .o, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .o { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ch, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ch { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cm { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cp { + color: var(--brown); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cpf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cpf { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c1 { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cs { + 
color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gd { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ge, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ge { + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gr { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gh { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gi { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .go, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .go { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gp { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gs { + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gu, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gu { + color: var(--purple); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gt { + color: var(--codeBlue); +} +:root[data-mode=dark] 
body[data-dark_mode_code_blocks=true] .highlight .kc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kc { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kd { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kn { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kr { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kt { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .m, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .m { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .na, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .na { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nb { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nc, 
+:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nc { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .no, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .no { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nd { + color: var(--purple); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ni, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ni { + color: var(--grey); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ne, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ne { + color: var(--red); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nf { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nl { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nn { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nt { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nv, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nv { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ow, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .ow { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .w, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .w { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mb { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mf { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mh { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mi { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mo, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mo { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sa, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sa { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sb { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sc { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .dl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .dl { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight 
.sd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sd { + color: var(--red); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s2, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s2 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .se, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .se { + color: var(--brown); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sh { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .si, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .si { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sx, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sx { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sr { + color: var(--pink); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s1 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ss, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ss { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .bp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .bp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .fm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .fm { + color: 
var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vc { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vg, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vg { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vi { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vm { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .il, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .il { + color: var(--grey); } +/*# sourceMappingURL=basic_mod.css.map */ diff --git a/Docs/beta/api/index.rst b/Docs/beta/api/index.rst new file mode 100644 index 00000000000..af1bf25ef41 --- /dev/null +++ b/Docs/beta/api/index.rst @@ -0,0 +1,5 @@ +.. _api-index: + +######### +AIMET API +######### \ No newline at end of file diff --git a/Docs/beta/conf.py b/Docs/beta/conf.py index f2eba1f9acd..8991dd34b86 100644 --- a/Docs/beta/conf.py +++ b/Docs/beta/conf.py @@ -59,9 +59,9 @@ author = 'Qualcomm Innovation Center, Inc.' # The short X.Y version -version = '' +version = '2.0' # The full version, including alpha/beta/rc tags -release = '' +release = '2.0' if "SW_VERSION" in os.environ: version = os.environ['SW_VERSION'] else: @@ -77,7 +77,7 @@ def setup(app): # If your documentation needs a minimal Sphinx version, state it here. # -# needs_sphinx = '1.0' +# needs_sphinx = '5.3.0' # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -92,7 +92,9 @@ def setup(app): 'sphinx.ext.mathjax', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon' + 'sphinx.ext.napoleon', + 'sphinx_tabs.tabs', + 'sphinx_design' ] # Add any paths that contain templates here, relative to this directory. @@ -105,7 +107,7 @@ def setup(app): source_suffix = '.rst' # The master toctree document. -master_doc = 'index' +root_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -130,20 +132,24 @@ def setup(app): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' -html_title = 'AI Model Efficiency Toolkit Documentation: ver ' + version -html_logo = 'images/brain_logo.png' +html_theme = 'piccolo_theme' +html_title = 'AI Model Efficiency Toolkit Documentation version ' + version +html_short_title = 'AIMET Docs v. ' + version +# html_logo = 'images/brain_logo.png' +# html_favicon = 'images/brain_logo16.png' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { + 'banner_hiding': "temporary", + 'banner_text': "", 'display_version': True, - 'prev_next_buttons_location': 'both', - 'collapse_navigation': False, - 'sticky_navigation': True, - 'navigation_depth': 10 + 'source_url': "https://github.com/quic/aimet", + 'globaltoc_includehidden': True, + 'globaltoc_maxdepth': 2, + 'globaltoc_collapse': False } # Add any paths that contain custom static files (such as style sheets) here, @@ -160,8 +166,7 @@ def setup(app): # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. 
# -html_sidebars = {'**': ['localtoc.html', 'relations.html', 'searchbox.html']} - +html_sidebars = {'**': ['globaltoc.html', 'localtoc.html', 'searchbox.html'] } # -- Options for HTMLHelp output --------------------------------------------- @@ -200,7 +205,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'AIMET.tex', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'AIMET.tex', 'AI Model Efficiency Toolkit Documentation', 'Qualcomm Innovation Center, Inc.', 'manual'), ] @@ -210,7 +215,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'aimet', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'aimet', 'AI Model Efficiency Toolkit Documentation', [author], 1) ] @@ -221,7 +226,7 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'AIMET', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'AIMET', 'AI Model Efficiency Toolkit Documentation', author, 'AIMET', 'One line description of project.', 'Miscellaneous'), ] @@ -233,6 +238,16 @@ def setup(app): nbsphinx_allow_errors = True nbsphinx_execute = 'never' +docs_root_url = "https://quic.github.io/aimet-pages/releases/latest/" + +# Version here refers to the AIMET torch v1/v2 version, not the AIMET release number +html_context = { + 'current_version' : "Universal", + 'versions' : [["Universal", docs_root_url + "features/index.html"], + ["PyTorch", docs_root_url + "torch_v2/torch_docs/index.html"]], + 'display_version_tab': False +} + autosummary_generate = False # contains a list of modules to be mocked up which are not available during docs build time diff --git a/Docs/beta/examples/index.rst b/Docs/beta/examples/index.rst new file mode 100644 index 00000000000..5da28a64261 --- /dev/null +++ 
b/Docs/beta/examples/index.rst @@ -0,0 +1,5 @@ +.. _examples-index: + +######### +Examples +######### \ No newline at end of file diff --git a/Docs/beta/index.rst b/Docs/beta/index.rst index 3716593b4f6..5c4f111f9e5 100644 --- a/Docs/beta/index.rst +++ b/Docs/beta/index.rst @@ -1,3 +1,68 @@ -============ -Hello World! -============ +.. _top-index: + + +###################################### +AI Model Efficiency Toolkit User Guide +###################################### + +.. toctree:: + :hidden: + + Quick Start <../install/quick-start> + Installation <../install/index> + AIMET User Guide <../ug/index> + Examples <../examples/index> + API Reference <../api/index> + Release Notes <../rn/index> + +AI Model Efficiency Toolkit (AIMET) is a software toolkit for quantizing and compressing models. + +The goal of optimizing a model is to enable its use on an edge device such as a mobile phone or laptop. + +AIMET uses post-training and fine-tuning techniques to optimize trained models in ways that minimize accuracy loss incurred during quantization or compression. + +AIMET supports PyTorch, TensorFlow, and Keras models, and ONNX models with limited functionality. + +.. grid:: 1 + + .. grid-item-card:: Quick Start + :link: install-quick-start + :link-type: ref + + To install and get started as quickly as possible using AIMET with PyTorch, see the **Quick Start guide**. + + .. grid-item-card:: Installation + :link: install-index + :link-type: ref + + For other install options, including for TensorFlow and ONNX platforms or to run AIMET in a Docker container, see Installation. + + .. grid-item-card:: User Guide + :link: ug-index + :link-type: ref + + For a technical overview of AIMET, and to see step by step how to apply AIMET techniques, see the AIMET User Guide. + + .. grid-item-card:: Examples + :link: examples-index + :link-type: ref + + To view end-to-end examples of model quantization and compression, and to download the examples in Jupyter notebook format, see Examples.
+ + .. grid-item-card:: API + :link: api-index + :link-type: ref + + For a detailed look at the AIMET API, see the API Reference. + + .. grid-item-card:: Release Notes + :link: rn-index + :link-type: ref + + For information specific to this release, see :ref:`Release Notes <rn-index>`. + + +| |project| is a product of |author| +| Qualcomm\ |reg| Neural Processing SDK is a product of Qualcomm Technologies, Inc. and/or its subsidiaries. + +.. |reg| unicode:: U+000AE .. REGISTERED SIGN diff --git a/Docs/beta/install/index.rst b/Docs/beta/install/index.rst new file mode 100644 index 00000000000..a23bb147a59 --- /dev/null +++ b/Docs/beta/install/index.rst @@ -0,0 +1,5 @@ +.. _install-index: + +############ +Installation +############ \ No newline at end of file diff --git a/Docs/beta/install/quick-start.rst b/Docs/beta/install/quick-start.rst new file mode 100644 index 00000000000..f6a5cd22bfd --- /dev/null +++ b/Docs/beta/install/quick-start.rst @@ -0,0 +1,5 @@ +.. _install-quick-start: + +########### +Quick Start +########### \ No newline at end of file diff --git a/Docs/beta/rn/index.rst b/Docs/beta/rn/index.rst new file mode 100644 index 00000000000..899618b8012 --- /dev/null +++ b/Docs/beta/rn/index.rst @@ -0,0 +1,5 @@ +.. _rn-index: + +############# +Release Notes +############# \ No newline at end of file diff --git a/Docs/beta/ug/features/adaround.rst b/Docs/beta/ug/features/adaround.rst new file mode 100644 index 00000000000..60e81534954 --- /dev/null +++ b/Docs/beta/ug/features/adaround.rst @@ -0,0 +1,102 @@ +.. _feature-adaround: + +################# +Adaptive rounding +################# + +Context +======= + +.. include:: ../user_guide/adaround.rst + :start-after: adaround-context + :end-before: adaround-api + +Prerequisites +------------- + +Model, GPU, CUDA, dataloaders, dependencies. + +Workflow +-------- + +Step 1 +~~~~~~ + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + ..
literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + +Step 2 +~~~~~~ + +... and so on. + + +Results +------- + +Optional. + +AdaRound should result in improved accuracy, but does not guarantee sufficient improvement. + + +Next steps +---------- + +If AdaRound resulted in satisfactory accuracy, export the model. + +.. tabs:: + + .. tab:: PyTorch + + Link to PyTorch export procedure. + + .. tab:: TensorFlow + + Link to TensorFlow export procedure. + + .. tab:: ONNX + + Link to ONNX export procedure. + +If the model is still not accurate enough, the next step is typically to try :doc:`quantization-aware training <qat>`. + + +API +=== + +.. tabs:: + + .. tab:: PyTorch + + :ref:`PyTorch API ` + + .. tab:: TensorFlow + + :ref:`Keras API ` + + .. tab:: ONNX + + :ref:`ONNX API ` diff --git a/Docs/beta/ug/features/autoquant.rst b/Docs/beta/ug/features/autoquant.rst new file mode 100644 index 00000000000..b448cd17cc0 --- /dev/null +++ b/Docs/beta/ug/features/autoquant.rst @@ -0,0 +1,47 @@ +.. _feature-autoquant: + +###################### +Automatic quantization +###################### + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + ..
literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/bn.rst b/Docs/beta/ug/features/bn.rst new file mode 100644 index 00000000000..d916934ef5e --- /dev/null +++ b/Docs/beta/ug/features/bn.rst @@ -0,0 +1,47 @@ +.. _feature-bn: + +################# +Batch norm re-estimation +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/cle.rst b/Docs/beta/ug/features/cle.rst new file mode 100644 index 00000000000..99216108dd3 --- /dev/null +++ b/Docs/beta/ug/features/cle.rst @@ -0,0 +1,47 @@ +.. _feature-cle: + +################# +Cross-layer equalization +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. 
literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/cp.rst b/Docs/beta/ug/features/cp.rst new file mode 100644 index 00000000000..b5a08313b0d --- /dev/null +++ b/Docs/beta/ug/features/cp.rst @@ -0,0 +1,47 @@ +.. _feature-cp: + +################# +Channel pruning +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/index.rst b/Docs/beta/ug/features/index.rst new file mode 100644 index 00000000000..5f4cacb0080 --- /dev/null +++ b/Docs/beta/ug/features/index.rst @@ -0,0 +1,141 @@ +.. _features-index: + +################ +Optimization Techniques +################ + +.. toctree:: + :hidden: + + Quantization aware training (QAT) + Automatic quantization (AutoQuant) + Adaptive rounding (Adaround) + Cross-layer equalization (CLE) + Batch norm re-estimation (BN) + Quantization analyzer (QuantAnalyzer) + Visualization + Weight singular value decomposition (Weight SVD) + Spatial singular value decomposition (Spatial SVD) + Channel pruning (CP) + + +Quantization aware training (QAT) +================== + +.. grid:: 1 + + .. grid-item-card:: Quantization aware training (QAT) + :link: feature-qat + :link-type: ref + + QAT fine-tunes the model parameters in the presence of quantization noise. 
+ + +Automatic quantization (AutoQuant) +================================================== + +.. grid:: 1 + + .. grid-item-card:: Automatic quantization (AutoQuant) + :link: feature-autoquant + :link-type: ref + + AutoQuant analyzes the model, determines the best sequence of AIMET post-training quantization techniques, and applies these techniques. + + + + +Adaptive rounding (Adaround) +================================================== + +.. grid:: 1 + + .. grid-item-card:: Adaptive rounding (Adaround) + :link: feature-adaround + :link-type: ref + + AdaRound uses training data to improve accuracy over naïve rounding. + + +Cross-layer equalization (CLE) +================================================== + +.. grid:: 1 + + .. grid-item-card:: Cross-layer equalization (CLE) + :link: feature-cle + :link-type: ref + + CLE scales the parameter ranges across different channels to increase the range for layers with low range and reduce range for layers with high range, enabling the same quantization parameters to be used across all channels. + + +Batch norm re-estimation (BN) +================================================== + +.. grid:: 1 + + .. grid-item-card:: Batch norm re-estimation (BN) + :link: feature-bn + :link-type: ref + + BN re-estimated statistics are used to adjust the quantization scale parameters of preceding Convolution or Linear layers, effectively folding the BN layers. + + +Quantization analyzer (QuantAnalyzer) +================================================== + +.. grid:: 1 + + .. grid-item-card:: Quantization analyzer (QuantAnalyzer) + :link: feature-quant-analyzer + :link-type: ref + + QuantAnalyzer automatically identifies sensitive areas and hotspots in the model. + + +Visualization +================== + +.. grid:: 1 + + .. grid-item-card:: Visualization + :link: feature-visualization + :link-type: ref + + Visualization tools provide insight into the model to aid in quantization and compression. + + +Weight singular value decomposition (Weight SVD) +================================================== + +.. grid:: 1 + + ..
grid-item-card:: Weight singular value decomposition (Weight SVD) + :link: feature-weight-svd + :link-type: ref + + Weight SVD decomposes one large MAC or memory layer into two smaller layers. + + +Spatial singular value decomposition (Spatial SVD) +================== + +.. grid:: 1 + + .. grid-item-card:: Spatial singular value decomposition (Spatial SVD) + :link: feature-spatial-svd + :link-type: ref + + Spatial SVD decomposes one large convolution (Conv) MAC or memory layer into two smaller layers. + + +Channel pruning (CP) +================== + +.. grid:: 1 + + .. grid-item-card:: Channel pruning (CP) + :link: feature-cp + :link-type: ref + + CP removes less-important input channels from 2D convolution layers. diff --git a/Docs/beta/ug/features/qat.rst b/Docs/beta/ug/features/qat.rst new file mode 100644 index 00000000000..ed69ee373a8 --- /dev/null +++ b/Docs/beta/ug/features/qat.rst @@ -0,0 +1,47 @@ +.. _feature-qat: + +################# +Quantization aware training +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/quant_analyzer.rst b/Docs/beta/ug/features/quant_analyzer.rst new file mode 100644 index 00000000000..fd6556d536d --- /dev/null +++ b/Docs/beta/ug/features/quant_analyzer.rst @@ -0,0 +1,47 @@ +.. 
_feature-quant-analyzer: + +################# +Quantization analyzer +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/quant_sim.rst b/Docs/beta/ug/features/quant_sim.rst new file mode 100644 index 00000000000..142d78bf5dd --- /dev/null +++ b/Docs/beta/ug/features/quant_sim.rst @@ -0,0 +1,47 @@ +.. _feature-quant-sim: + +################# +Quantization simulation +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/spatial_svd.rst b/Docs/beta/ug/features/spatial_svd.rst new file mode 100644 index 00000000000..50a73e2adee --- /dev/null +++ b/Docs/beta/ug/features/spatial_svd.rst @@ -0,0 +1,47 @@ +.. _feature-spatial-svd: + +################# +Spatial SVD +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. 
tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/visualization.rst b/Docs/beta/ug/features/visualization.rst new file mode 100644 index 00000000000..35cc5442e78 --- /dev/null +++ b/Docs/beta/ug/features/visualization.rst @@ -0,0 +1,47 @@ +.. _feature-visualization: + +################# +Visualization +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/features/weight_svd.rst b/Docs/beta/ug/features/weight_svd.rst new file mode 100644 index 00000000000..482a4ae8ae8 --- /dev/null +++ b/Docs/beta/ug/features/weight_svd.rst @@ -0,0 +1,47 @@ +.. _feature-weight-svd: + +################# +Weight SVD +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. 
literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/ug/index.rst b/Docs/beta/ug/index.rst new file mode 100644 index 00000000000..69c6c3190dc --- /dev/null +++ b/Docs/beta/ug/index.rst @@ -0,0 +1,17 @@ +.. _ug-index: + +######### +AIMET User Guide +######### + +.. toctree:: + :hidden: + + Model Optimization + Optimization Techniques + +This user guide is organized into the following sections: + +**Model optimization** describes the recommended workflow for quantizing and compressing models using AIMET. + +**Techniques** describes each quantization and compression technique in detail and gives procedures for using each one. \ No newline at end of file diff --git a/Docs/beta/ug/optimization/index.rst b/Docs/beta/ug/optimization/index.rst new file mode 100644 index 00000000000..1f897969f7a --- /dev/null +++ b/Docs/beta/ug/optimization/index.rst @@ -0,0 +1,3 @@ +######### +Model Optimization +######### From cc4908ed52c1f6ab72b389e5a9f44774c4e36525 Mon Sep 17 00:00:00 2001 From: Dave Welsch Date: Thu, 7 Nov 2024 13:23:06 -0800 Subject: [PATCH 2/3] Fill out folder structure for all TOC sections. Complete AdaRound example. 
Signed-off-by: Dave Welsch --- Docs/beta/_static/style.css | 31 ++- Docs/beta/{api => apiref}/index.rst | 2 +- Docs/beta/apiref/onnx/adaround.rst | 21 ++ Docs/beta/apiref/tensorflow/adaround.rst | 22 +++ Docs/beta/apiref/torch/adaround.rst | 37 ++++ Docs/beta/apiref/torch/model_preparer.rst | 153 ++++++++++++++ Docs/beta/conf.py | 1 + Docs/beta/featureguide/adaround.rst | 187 ++++++++++++++++++ .../features => featureguide}/autoquant.rst | 2 +- .../beta/{ug/features => featureguide}/bn.rst | 2 +- .../{ug/features => featureguide}/cle.rst | 2 +- .../beta/{ug/features => featureguide}/cp.rst | 2 +- Docs/beta/featureguide/index.rst | 69 +++++++ .../{ug/features => featureguide}/qat.rst | 2 +- .../quant_analyzer.rst | 2 +- .../features => featureguide}/quant_sim.rst | 2 +- .../features => featureguide}/spatial_svd.rst | 2 +- .../visualization.rst | 2 +- .../features => featureguide}/weight_svd.rst | 2 +- Docs/beta/images/adaround.png | Bin 0 -> 36262 bytes Docs/beta/index.rst | 64 +++--- Docs/beta/opt-guide/compression/index.rst | 5 + Docs/beta/opt-guide/index.rst | 21 ++ Docs/beta/opt-guide/overview.rst | 5 + Docs/beta/opt-guide/overview/index.rst | 5 + Docs/beta/opt-guide/quantization/index.rst | 5 + Docs/beta/quantsim/index.rst | 5 + Docs/beta/snippets/torch/apply_adaround.py | 22 +++ .../torch/create_quantizationsimmodel.py | 13 ++ Docs/beta/snippets/torch/evaluate.py | 3 + Docs/beta/snippets/torch/export.py | 4 + .../snippets/torch/pass_calibration_data.py | 35 ++++ Docs/beta/snippets/torch/prepare_model.py | 5 + Docs/beta/ug/features/adaround.rst | 102 ---------- Docs/beta/ug/features/index.rst | 141 ------------- Docs/beta/ug/index.rst | 17 -- Docs/beta/ug/optimization/index.rst | 3 - 37 files changed, 692 insertions(+), 306 deletions(-) rename Docs/beta/{api => apiref}/index.rst (62%) create mode 100644 Docs/beta/apiref/onnx/adaround.rst create mode 100644 Docs/beta/apiref/tensorflow/adaround.rst create mode 100644 Docs/beta/apiref/torch/adaround.rst create 
mode 100644 Docs/beta/apiref/torch/model_preparer.rst create mode 100644 Docs/beta/featureguide/adaround.rst rename Docs/beta/{ug/features => featureguide}/autoquant.rst (96%) rename Docs/beta/{ug/features => featureguide}/bn.rst (97%) rename Docs/beta/{ug/features => featureguide}/cle.rst (97%) rename Docs/beta/{ug/features => featureguide}/cp.rst (97%) create mode 100644 Docs/beta/featureguide/index.rst rename Docs/beta/{ug/features => featureguide}/qat.rst (97%) rename Docs/beta/{ug/features => featureguide}/quant_analyzer.rst (95%) rename Docs/beta/{ug/features => featureguide}/quant_sim.rst (96%) rename Docs/beta/{ug/features => featureguide}/spatial_svd.rst (96%) rename Docs/beta/{ug/features => featureguide}/visualization.rst (95%) rename Docs/beta/{ug/features => featureguide}/weight_svd.rst (96%) create mode 100644 Docs/beta/images/adaround.png create mode 100644 Docs/beta/opt-guide/compression/index.rst create mode 100644 Docs/beta/opt-guide/index.rst create mode 100644 Docs/beta/opt-guide/overview.rst create mode 100644 Docs/beta/opt-guide/overview/index.rst create mode 100644 Docs/beta/opt-guide/quantization/index.rst create mode 100644 Docs/beta/quantsim/index.rst create mode 100644 Docs/beta/snippets/torch/apply_adaround.py create mode 100644 Docs/beta/snippets/torch/create_quantizationsimmodel.py create mode 100644 Docs/beta/snippets/torch/evaluate.py create mode 100644 Docs/beta/snippets/torch/export.py create mode 100644 Docs/beta/snippets/torch/pass_calibration_data.py create mode 100644 Docs/beta/snippets/torch/prepare_model.py delete mode 100644 Docs/beta/ug/features/adaround.rst delete mode 100644 Docs/beta/ug/features/index.rst delete mode 100644 Docs/beta/ug/index.rst delete mode 100644 Docs/beta/ug/optimization/index.rst diff --git a/Docs/beta/_static/style.css b/Docs/beta/_static/style.css index 0df77588fe1..76652d136b1 100644 --- a/Docs/beta/_static/style.css +++ b/Docs/beta/_static/style.css @@ -298,7 +298,7 @@ div.document { } 
div.document section, div.document div.section { - margin: 4rem 0; + margin: 2rem 0; } div.document section:first-child, div.document div.section:first-child { @@ -306,7 +306,7 @@ div.document div.section:first-child { } div.document section > section, div.document div.section > div.section { - margin: 4rem 0; + margin: 2rem 0; } div.document section > section > section, div.document div.section > div.section > div.section { @@ -584,6 +584,20 @@ div.document div.admonition.attention p.admonition-title, div.document div.admon color: #e7b486; } +/*****************************************************************************/ +/* Custom classes for UI control extensions: tabs, grids, cards. */ + +/* tab-heading: Heading for use inside tabs, since underline */ +/* notation doesn't work there. Invoke with: */ +/* .. container:: .tab-heading */ +/* Heading text */ + +.tab-heading p { + font-size: 1.2em !important; + font-weight: bold; +} + + /*****************************************************************************/ /* Sidebar */ div.sphinxsidebar { @@ -607,6 +621,14 @@ div.sphinxsidebar div.sphinxsidebarwrapper { height: 100%; overflow-y: auto; } +/* This is a total hack to remove the inexplicable ToC from the left sidebar */ +div.sphinxsidebar div.sphinxsidebarwrapper > div { + display: none; +} +/* Format the TOC caption (title) */ +div.sphinxsidebar span.caption-text { + font-weight: bold; +} div.sphinxsidebar ul { padding-left: 0rem; list-style: none; @@ -715,6 +737,11 @@ div#right_sidebar p { } } +/* Limit the TOC depth in the right sidebar */ +div.page_toc ul ul ul { + display: none; +} + /*****************************************************************************/ /* Footer */ div.footer { diff --git a/Docs/beta/api/index.rst b/Docs/beta/apiref/index.rst similarity index 62% rename from Docs/beta/api/index.rst rename to Docs/beta/apiref/index.rst index af1bf25ef41..3d98f4071dc 100644 --- a/Docs/beta/api/index.rst +++ b/Docs/beta/apiref/index.rst @@ -1,4 +1,4 
@@ -.. _api-index: +.. _apiref-index: ######### AIMET API diff --git a/Docs/beta/apiref/onnx/adaround.rst b/Docs/beta/apiref/onnx/adaround.rst new file mode 100644 index 00000000000..bdc8487ac32 --- /dev/null +++ b/Docs/beta/apiref/onnx/adaround.rst @@ -0,0 +1,21 @@ +.. _apiref-onnx-adaround: + :orphan: + +####################### +AIMET ONNX AdaRound API +####################### + +**Top-level API** + +.. autofunction:: aimet_onnx.adaround.adaround_weight.Adaround.apply_adaround + + +**Adaround Parameters** + + +.. autoclass:: aimet_onnx.adaround.adaround_weight.AdaroundParameters + :members: + +.. note:: + + We recommended using onnx-simplifier before adarounding the model. diff --git a/Docs/beta/apiref/tensorflow/adaround.rst b/Docs/beta/apiref/tensorflow/adaround.rst new file mode 100644 index 00000000000..28860723e75 --- /dev/null +++ b/Docs/beta/apiref/tensorflow/adaround.rst @@ -0,0 +1,22 @@ +.. _apiref-keras-adaround: + :orphan: + +############################# +AIMET TensorFlow AdaRound API +############################# + +**Top-level API** + +.. autofunction:: aimet_tensorflow.keras.adaround_weight.Adaround.apply_adaround + +**Adaround Parameters** + +.. autoclass:: aimet_tensorflow.keras.adaround_weight.AdaroundParameters + :members: + +**Enum Definition** + +**Quant Scheme Enum** + +.. autoclass:: aimet_common.defs.QuantScheme + :members: \ No newline at end of file diff --git a/Docs/beta/apiref/torch/adaround.rst b/Docs/beta/apiref/torch/adaround.rst new file mode 100644 index 00000000000..005f56c28df --- /dev/null +++ b/Docs/beta/apiref/torch/adaround.rst @@ -0,0 +1,37 @@ +.. _apiref-torch-adaround: + :orphan: + +########################## +AIMET PyTorch AdaRound API +########################## + + +.. container:: tab-heading + + Top-level API + +.. note:: + + This module is also available in the experimental :mod:`aimet_torch.v2` namespace with the same top-level API. 
+ To learn more about the differences between :mod:`aimet_torch` and :mod:`aimet_torch.v2`, see the + QuantSim v2 Overview. + +.. autofunction:: aimet_torch.v1.adaround.adaround_weight.Adaround.apply_adaround + + +.. container:: tab-heading + + Adaround Parameters + +.. autoclass:: aimet_torch.v1.adaround.adaround_weight.AdaroundParameters + :members: + + +.. container:: tab-heading + + Enum Definition + +**Quant Scheme Enum** + +.. autoclass:: aimet_common.defs.QuantScheme + :members: \ No newline at end of file diff --git a/Docs/beta/apiref/torch/model_preparer.rst b/Docs/beta/apiref/torch/model_preparer.rst new file mode 100644 index 00000000000..8412242ef1e --- /dev/null +++ b/Docs/beta/apiref/torch/model_preparer.rst @@ -0,0 +1,153 @@ +.. _apiref-torch-model-preparer: +:orphan: + +################## +Model Preparer API +################## + +AIMET PyTorch ModelPreparer API uses new graph transformation feature available in PyTorch 1.9+ version and automates +model definition changes required by user. For example, it changes functionals defined in forward pass to +torch.nn.Module type modules for activation and elementwise functions. Also, when torch.nn.Module type modules are reused, +it unrolls into independent modules. + +Users are strongly encouraged to use AIMET PyTorch ModelPreparer API first and then use the returned model as input +to all the AIMET Quantization features. + +AIMET PyTorch ModelPreparer API requires minimum PyTorch 1.9 version. + + +.. container:: tab-heading + + Top-level API + +.. autofunction:: aimet_torch.model_preparer.prepare_model + + +.. container:: tab-heading + +Limitations of torch.fx symbolic trace API + +Limitations of torch.fx symbolic trace: https://pytorch.org/docs/stable/fx.html#limitations-of-symbolic-tracing + +**1. Dynamic control flow is not supported by torch.fx** +Loops or if-else statement where condition may depend on some of the input values. 
It can only trace one execution +path and all the other branches that weren't traced will be ignored. For example, following simple function when traced, +will fail with TraceError saying that 'symbolically traced variables cannot be used as inputs to control flow':: + + def f(x, flag): + if flag: + return x + else: + return x*2 + + torch.fx.symbolic_trace(f) # Fails! + fx.symbolic_trace(f, concrete_args={'flag': True}) + +Workarounds for this problem: + +- Many cases of dynamic control flow can be simply made to static control flow which is supported by torch.fx + symbolic tracing. Static control flow is where loops or if-else statements whose value can't change + across different model forward passes. Such cases can be traced by removing data dependencies on input values by + passing concrete values to 'concrete_args' to specialize your forward functions. + +- In truly dynamic control flow, user should wrap such piece of code at model-level scope using torch.fx.wrap API + which will preserve it as a node instead of being traced through:: + + @torch.fx.wrap + def custom_function_not_to_be_traced(x, y): + """ Function which we do not want to be traced, when traced using torch FX API, call to this function will + be inserted as call_function, and won't be traced through """ + for i in range(2): + x += x + y += y + return x * x + y * y + +**2. Non-torch functions which does not use __torch_function__ mechanism is not supported by default in symbolic +tracing.** + +Workaround for this problem: + +- If we do not want to capture them in symbolic tracing then user should use torch.fx.wrap() API at module-level scope:: + + import torch + import torch.fx + torch.fx.wrap('len') # call the API at module-level scope. + torch.fx.wrap('sqrt') # call the API at module-level scope. 
+ + class ModelWithNonTorchFunction(torch.nn.Module): + def __init__(self): + super(ModelWithNonTorchFunction, self).__init__() + self.conv = torch.nn.Conv2d(3, 4, kernel_size=2, stride=2, padding=2, bias=False) + + def forward(self, *inputs): + x = self.conv(inputs[0]) + return x / sqrt(len(x)) + + model = ModelWithNonTorchFunction().eval() + model_transformed = prepare_model(model) + + +**3. Customizing the behavior of tracing by overriding the Tracer.is_leaf_module() API** + +In symbolic tracing, leaf modules appears as node rather than being traced through and all the standard torch.nn modules +are default set of leaf modules. But this behavior can be changed by overriding the Tracer.is_leaf_module() API. + +AIMET model preparer API exposes 'module_to_exclude' argument which can be used to prevent certain module(s) being +traced through. For example, let's examine following code snippet where we don't want to trace CustomModule further:: + + class CustomModule(torch.nn.Module): + @staticmethod + def forward(x): + return x * torch.nn.functional.softplus(x).sigmoid() + + class CustomModel(torch.nn.Module): + def __init__(self): + super(CustomModel, self).__init__() + self.conv1 = torch.nn.Conv2d(3, 8, kernel_size=2) + self.custom = CustomModule() + + def forward(self, inputs): + x = self.conv1(inputs) + x = self.custom(x) + return x + + model = CustomModel().eval() + prepared_model = prepare_model(model, modules_to_exclude=[model.custom]) + print(prepared_model) + +In this example, 'self.custom' is preserved as node and not being traced through. + +**4. 
Tensor constructors are not traceable** + +For example, let's examine following code snippet:: + + def f(x): + return torch.arange(x.shape[0], device=x.device) + + torch.fx.symbolic_trace(f) + + Error traceback: + return torch.arange(x.shape[0], device=x.device) + TypeError: arange() received an invalid combination of arguments - got (Proxy, device=Attribute), but expected one of: + * (Number end, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad) + * (Number start, Number end, Number step, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad) + +The above snippet is problematic because arguments to torch.arange() are input dependent. +Workaround for this problem: + +- use deterministic constructors (hard-coding) so that the value they produce will be embedded as constant in + the graph:: + + def f(x): + return torch.arange(10, device=torch.device('cpu')) + +- Or use torch.fx.wrap API to wrap torch.arange() and call that instead:: + + @torch.fx.wrap + def do_not_trace_me(x): + return torch.arange(x.shape[0], device=x.device) + + def f(x): + return do_not_trace_me(x) + + torch.fx.symbolic_trace(f) diff --git a/Docs/beta/conf.py b/Docs/beta/conf.py index 8991dd34b86..df85b122624 100644 --- a/Docs/beta/conf.py +++ b/Docs/beta/conf.py @@ -147,6 +147,7 @@ def setup(app): 'banner_text': "", 'display_version': True, 'source_url': "https://github.com/quic/aimet", + 'localtoc_maxdepth': 2, 'globaltoc_includehidden': True, 'globaltoc_maxdepth': 2, 'globaltoc_collapse': False diff --git a/Docs/beta/featureguide/adaround.rst b/Docs/beta/featureguide/adaround.rst new file mode 100644 index 00000000000..a7c4e10c9e3 --- /dev/null +++ b/Docs/beta/featureguide/adaround.rst @@ -0,0 +1,187 @@ +.. 
_featureguide-adaround: + +################# +Adaptive rounding +################# + +Context +======= + +By default, AIMET uses *nearest rounding* for quantization, in which weight values are quantized to the nearest integer value. + +AIMET adaptive rounding (AdaRound) uses training data to choose how to round quantized weights, improving the quantized model's accuracy in many cases. + +The following figures illustrates how AdaRound might change the rounding of a quantized value. + +.. image:: ../images/adaround.png + :width: 600px + +See the :doc:`Optimization User Guide <../optimization/index>` for a discussion of the recommended sequence of all quantization techniques. + +Complementary techniques +------------------------ + +We recommend using AdaRound in combination with these other techniques: + +- After batch norm folding (BNF) and cross layer equalization (CLE). Applying these techniques first might improve the accuracy gained using AdaRound. +- Before quantization aware training (QAT). For some models applying BNF and CLE may not help. For these models, applying AdaRound before QAT might help. AdaRound is a better weights initialization step that speeds up QAT + +Conversely, we recommend that you *do not* apply bias correction (BC) before or after using AdaRound. + +Hyper parameters +---------------- + +A number of hyper parameters used during AdaRound optimization are exposed in the API. The default values of some of these parameters tend to lead to stable results and we recommend that you not change them. + +Use the following guideline for adjusting hyper parameters with AdaRound. + +Hyper Parameters to be changed at will + - Number of batches. AdaRound should see 500-1000 images. Loader batch size times number of batches gives the number of images. For example if the data loader batch size is 64, set 16 batches to yield 1024 images. + - Number of iterations. Default is 10,000. + +Hyper Parameters to change with caution + Regularization parameter. 
Default is 0.01. + +Hyper Parameters to avoid changing + - Beta range. Leave the value at the default of (20, 2). + - Warm start period. Leave at the default value, 20%. + +Workflow +======== + +Prerequisites +------------- + +To use AdaRound, you must: + +- Load a trained model +- Create a training or validation dataloader for the model + +Workflow +-------- + +Step 1 +~~~~~~ + +Prepare the model for quantization. + +.. tabs:: + + .. tab:: PyTorch + + .. container:: tab-heading + + Prepare the model for quantization + + AIMET quantization simulation (QuantSim) for PyTorch requires the user's model definition to follow certain guidelines. For example, functionals defined in forward pass should be changed to an equivalent + **torch.nn.Module**. For a list of these guidelines, see the :ref:`Optimization Guide `. + + Use the :ref:`AIMET ModelPreparer API ` graph transformation feature to automate the model definition changes required to comply with the QuantSim guidelines. + + .. literalinclude:: ../snippets/torch/prepare_model.py + :language: python + + For details of the model preparer API see the + :ref:`Model Preparer API `. + + .. tab:: TensorFlow + + Tensorflow has no preparation requirements. + + .. tab:: ONNX + + ONNX has no preparation requirements. + + +Step 2 +~~~~~~ + +Apply AdaRound to the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/apply_adaround.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/apply_adaround.py + :language: python + + .. tab:: ONNX + + .. literalinclude:: ../snippets/onnx/apply_adaround.py + :language: python + +Step 3 +~~~~~~ + +Evaluate the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/evaluate.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/evaluate.py + :language: python + + .. tab:: ONNX + + .. 
literalinclude:: ../snippets/onnx/evaluate.py + :language: python + + +Results +------- + +AdaRound should result in improved accuracy, but does not guaranteed sufficient improvement. + + +Next steps +---------- + +If AdaRound resulted in satisfactory accuracy, export the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/export.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/export.py + :language: python + + .. tab:: ONNX + + .. literalinclude:: ../snippets/onnx/export.py + :language: python + +If the model is still not accurate enough, the next step is typically to try :ref:`quantization-aware training `. + + +API +=== + +.. tabs:: + + .. tab:: PyTorch + + .. include:: ../apiref/torch/adaround.rst + + .. tab:: TensorFlow + + .. include:: ../apiref/tensorflow/adaround.rst + + .. tab:: ONNX + + .. include:: ../apiref/onnx/adaround.rst diff --git a/Docs/beta/ug/features/autoquant.rst b/Docs/beta/featureguide/autoquant.rst similarity index 96% rename from Docs/beta/ug/features/autoquant.rst rename to Docs/beta/featureguide/autoquant.rst index b448cd17cc0..5bf12f60e6f 100644 --- a/Docs/beta/ug/features/autoquant.rst +++ b/Docs/beta/featureguide/autoquant.rst @@ -1,4 +1,4 @@ -.. _feature-autoquant: +.. _featureguide-autoquant: ################# Automatic quantization diff --git a/Docs/beta/ug/features/bn.rst b/Docs/beta/featureguide/bn.rst similarity index 97% rename from Docs/beta/ug/features/bn.rst rename to Docs/beta/featureguide/bn.rst index d916934ef5e..73a9ac223e5 100644 --- a/Docs/beta/ug/features/bn.rst +++ b/Docs/beta/featureguide/bn.rst @@ -1,4 +1,4 @@ -.. _feature-bn: +.. 
_featureguide-bn: ################# Batch norm re-estimation diff --git a/Docs/beta/ug/features/cle.rst b/Docs/beta/featureguide/cle.rst similarity index 97% rename from Docs/beta/ug/features/cle.rst rename to Docs/beta/featureguide/cle.rst index 99216108dd3..44db666ed0d 100644 --- a/Docs/beta/ug/features/cle.rst +++ b/Docs/beta/featureguide/cle.rst @@ -1,4 +1,4 @@ -.. _feature-cle: +.. _featureguide-cle: ################# Cross-layer equalization diff --git a/Docs/beta/ug/features/cp.rst b/Docs/beta/featureguide/cp.rst similarity index 97% rename from Docs/beta/ug/features/cp.rst rename to Docs/beta/featureguide/cp.rst index b5a08313b0d..5f011a42998 100644 --- a/Docs/beta/ug/features/cp.rst +++ b/Docs/beta/featureguide/cp.rst @@ -1,4 +1,4 @@ -.. _feature-cp: +.. _featureguide-cp: ################# Channel pruning diff --git a/Docs/beta/featureguide/index.rst b/Docs/beta/featureguide/index.rst new file mode 100644 index 00000000000..4e9fe4ce6bc --- /dev/null +++ b/Docs/beta/featureguide/index.rst @@ -0,0 +1,69 @@ +.. _featureguide--index: + +####################### +Optimization Techniques +####################### + +.. toctree:: + :hidden: + + Quantization aware training + Automatic quantization + Adaptive rounding + Cross-layer equalization + Batch norm re-estimation + Quantization analyzer + Visualization + Weight SVD + Spatial SVD + Channel pruning + +:ref:`Quantization aware training (QAT) ` +====================================================== + +Fine-tunes the model parameters in the presence of quantization noise. + +:ref:`Automatic quantization (AutoQuant) ` +============================================================= + +Analyzes the model, determines the best sequence of AIMET post-training quantization techniques, and applies these techniques. + +:ref:`Adaptive rounding (Adaround) ` +====================================================== + +Uses training data to improve accuracy over naïve rounding. 
+ +:ref:`Cross-layer equalization (CLE) ` +=================================================== + +Scales the parameter ranges across different channels to increase the range for layers with low range and reduce range for layers with high range, enabling the same quantizaion parameters to be used across all channels. + +:ref:`Batch norm re-estimation (BN) ` +================================================= + +Re-estimated statistics are used to adjust the quantization scale parameters of preceeding Convolution or Linear layers, effectively folding the BN layers. + +:ref:`Quantization analyzer (QuantAnalzer) ` +==================================================================== + +Automatically identify sensitive areas and hotspots in the model. + +:ref:`Visualization ` +============================================ + +Automatically identify sensitive areas and hotspots in the model. + +:ref:`Weight singular value decomposition (Weight SVD) ` +============================================================================ + +Decomposes one large MAC or memory layer into two smaller layers. + +:ref:`Spatial singular value decomposition (Spatial SVD) ` +=============================================================================== + +Decomposes one large convolution (Conv) MAC or memory layer into two smaller layers. + +:ref:`Channel pruning (CP) ` +======================================== + +Removes less-important input channels from 2D convolution layers. diff --git a/Docs/beta/ug/features/qat.rst b/Docs/beta/featureguide/qat.rst similarity index 97% rename from Docs/beta/ug/features/qat.rst rename to Docs/beta/featureguide/qat.rst index ed69ee373a8..b96f31726ae 100644 --- a/Docs/beta/ug/features/qat.rst +++ b/Docs/beta/featureguide/qat.rst @@ -1,4 +1,4 @@ -.. _feature-qat: +.. 
_featureguide-qat: ################# Quantization aware training diff --git a/Docs/beta/ug/features/quant_analyzer.rst b/Docs/beta/featureguide/quant_analyzer.rst similarity index 95% rename from Docs/beta/ug/features/quant_analyzer.rst rename to Docs/beta/featureguide/quant_analyzer.rst index fd6556d536d..be957cdd45a 100644 --- a/Docs/beta/ug/features/quant_analyzer.rst +++ b/Docs/beta/featureguide/quant_analyzer.rst @@ -1,4 +1,4 @@ -.. _feature-quant-analyzer: +.. _featureguide-quant-analyzer: ################# Quantization analyzer diff --git a/Docs/beta/ug/features/quant_sim.rst b/Docs/beta/featureguide/quant_sim.rst similarity index 96% rename from Docs/beta/ug/features/quant_sim.rst rename to Docs/beta/featureguide/quant_sim.rst index 142d78bf5dd..c5dc7840b47 100644 --- a/Docs/beta/ug/features/quant_sim.rst +++ b/Docs/beta/featureguide/quant_sim.rst @@ -1,4 +1,4 @@ -.. _feature-quant-sim: +.. _featureguide-quant-sim: ################# Quantization simulation diff --git a/Docs/beta/ug/features/spatial_svd.rst b/Docs/beta/featureguide/spatial_svd.rst similarity index 96% rename from Docs/beta/ug/features/spatial_svd.rst rename to Docs/beta/featureguide/spatial_svd.rst index 50a73e2adee..d1745483361 100644 --- a/Docs/beta/ug/features/spatial_svd.rst +++ b/Docs/beta/featureguide/spatial_svd.rst @@ -1,4 +1,4 @@ -.. _feature-spatial-svd: +.. _featureguide-spatial-svd: ################# Spatial SVD diff --git a/Docs/beta/ug/features/visualization.rst b/Docs/beta/featureguide/visualization.rst similarity index 95% rename from Docs/beta/ug/features/visualization.rst rename to Docs/beta/featureguide/visualization.rst index 35cc5442e78..cd61608e9fa 100644 --- a/Docs/beta/ug/features/visualization.rst +++ b/Docs/beta/featureguide/visualization.rst @@ -1,4 +1,4 @@ -.. _feature-visualization: +.. 
_featureguide-visualization: ################# Visualization diff --git a/Docs/beta/ug/features/weight_svd.rst b/Docs/beta/featureguide/weight_svd.rst similarity index 96% rename from Docs/beta/ug/features/weight_svd.rst rename to Docs/beta/featureguide/weight_svd.rst index 482a4ae8ae8..99a4e2dd3ca 100644 --- a/Docs/beta/ug/features/weight_svd.rst +++ b/Docs/beta/featureguide/weight_svd.rst @@ -1,4 +1,4 @@ -.. _feature-weight-svd: +.. _featureguide-weight-svd: ################# Weight SVD diff --git a/Docs/beta/images/adaround.png b/Docs/beta/images/adaround.png new file mode 100644 index 0000000000000000000000000000000000000000..ccb29564a85ffdc862ddd7b3b0839ec2a7ab82a1 GIT binary patch literal 36262 zcmeFZby$>L*FJpBFi45gA%ciB$dJ+{iV`D8N+T(aNaqZQiiFZBAl(Q^Neu?Abhn7K zgdh#S4L;BN+|PY`e1E*h@%{JeA&e92+Iz*h&b8LQCP?+pEn>n8gb)M~E8M=J20^&s zA#4i&4EVLTinRd05SFqiSqLh5L3D8c|Hi z<=ilTWa?z)=xpU+&v+vDzKMg2vm_JK$%X#s&+m0QTUq?)O7@TbwJoqgm-~qzGvZ zr|}?222!{otKkk?9zXxmpxV_s~+ZF_`_Ntu#{ceKkSY~25Sm~qZx#GH0A!e zcM=&i#3+sP&%J>axO2-uT{BIY)_;7J1o~$ACwUO;1uj11Iaa0dXLZBT2WS7G$&>v~ zl>m*spo`7B{C7Q{yeb2|O!$WBO@9cd_|S+G>XRfvTrPo5C15lMXGg z%8KnqvhJ{1J>6A;N?QsO-%;vFZCwB7;`GTFp$Aqrts&9`M=ZEYX1VrkqcK@FW$_Qp(bnFP&sl~LmD)i<~4Yfsc2^f@enVbA6g|*0|A>(Ud@tI7WvrDV1+Is~x z8B_Ygv`TA^AD$3fdwS}kl$8-mLFS_CJ3H=<%Wi&_DD_e8IXiMNr)N^BRR8LWeFzKo+#eFRj51h#Ncz~TV&AR&Q21>U z+n?V*%dV;Tqdip%dgs2i@*BjzV#72DCL&;45-%=_MtmRW8@x^b{>XT<-sMo+*X77CpjoOz2oS`^8GP?f>Q@j#@HD zc1R>b(sMKC-9{T%e@n~luOCz2Mn_S*{}?X2MKA5EPcPxFI_}hdRW?BfRU6K)^p4-^ zcKzyjjdm;Hb&iH_C1$M=Qv!A#)kQQw5uZf1lY*rLR3}$Oapd6p+p90=KU+UZK6_)2 zE-<}QJjA^1ANF7%Y-tG9F4U#eqi4fl-y&B-B5w2A&oqU~PN}5uW*mX;S*;_@x_a$pa}p(K&23G09yJM^uJB9nI-EHJ+e2LXEMZbNy)%cye*C;| z{?3q@ZEfk{Du&43wsMr6F-9{TdJHu@i|ZW7n5 z^z%EHC9?eCg|cmyIJU-$JHoq_yNmt#{q;WkBNLng$IL17tOEOHX)n2vF_}%xM-O!m z{U(P-DKw(qwOL=cfXi>S?cRajT-*+ObhWgs1?~K;C0w4?oY+rG$=U74*zj%bdTyio zBn%_#`@iWH?Eo*mp$)g+drVW8Os2eK7U`UxC?U62F=|hg+{$(w6N@SepF6uKE5Q+b zu-`D=ux(yp;v6Ya;V{V{*|$@Kw27e&0$89rGcyxvv!fOUDWTgRu8Ir__PUGT>2nPD 
zmVP|>4DXV6m!tXIQ?sscs}J3wv3@s%cEbJ}6{ix>h)J^`d%+#HNbhS76Jz~9kJ6-w zc=TzO6;+?aVM%fS;{JV=Z2+{^MI_;r)* z_ymBUoMuGYaglH)JwtH4woF(i@x>?GKjZ6w`OAdu64NFyBNM6(sl~;}&*laOR~n@~ z4&9wc>%82)LL_xOD{el&E;Tm1b<%(OH!mnfOYF5#97KvylCIfU4q8*aW~k~lTDDTu z^MA~r&-;!AmnMC1P0|Qv0a<+GT3VUK^LBN;hCi^7(v$n4#h0qHPKnONL0eLNO(mJ& z%D5O-9~e#a)q zyYcJ{|CT!EfqB+fzDtLCX;1lgrS=?}Ocq*>FY_K6`48P({ySS!g<)!Oe8r~2=*3N* z{@D4pQsUDac0gwc_t;}a%B=t@!M~&;Wxn80rGRxbG4c01e#%JYO$NP9CGDY*^&_}l z36qWAW$SfGPvma>d}W8ty!KdW{kR6Z+>j$NN&2^9oLycfd6L@Th)c<@SL)cvAKlyA zYYhll$@{j3ee(e$LHWyEM6)AWO2MW&rQ;yj zRhi(Zl}LGGXw&joA^xYYmI=caBkG^=*AoxlS_J`C(xF;4_ z%+lyK_V8ASE(6ZM!c6sh#x#c8A*|dbcpj7JkC)QQh=GgOU4%`*^}}lTv=@}g1O;5& z>tK?@VM8}!UYn!9$eW$0UjJZcr z29@MDy@&sfC|F7u!I##u-S?>EltJqtJ9mQ-74@Y1-rlw8!+u$U-zIkwrKG76m6>0_ zf(KZ*XTE<nOe?V)n+H@%L%QGtO?@fmb-;-h=BG`EhRx z*$m_a;GJ&@b9wh@YrlwZg6Ww0_265U>FMeFrhDB1dmHy#YkDn-ZuI_>;7JkA!x{== z5bYTkJ*bSFBjW>K@ia%9H*}-m&kB29`t7SIE3^A(rb@&p`v=(05`dWNtmm){ifE^E z*%&HKx;|Fqy%B;H>?E*_JvaAvw_np?%tssb-L@EQ{kff%sN4}Q{mQmfLUd1G+9Uiy z{*;lRoz5>hVj0Ew_pR5vmv#)300ak@v9kT$qe)~Kg<fpxNI*OY-^tivYLt%O6&4pCt4 z8v8xmxZ8VkDZPv!eQ>?*H_MA8`llEY@-7_TaUO!?gakr=zi!4v?&0WI7qfDDQKyXb z46(ZU^pDk5C;c}1$cH%p{CHQ5OeL)0!599Yt0OkWcAkbt#j7-3Dcf#SSj>Y8yM}GC zfH!t$^t-}A^oLbd(fqe}oMgB+axxYNh4)IBeom$I*)Q~DmMDgWhH^;eP}eY)rZ-4d zY2|Zx$nolVz-w`(o4JqrAuKXGy)q zu}jP{RlbLwEL)-WbzkH|vOaD9Kzs)s*YWW-gMXwfP+k=xn4hth&3O(pPh~Z9$obom zK^-OnnaBl2-i-l02O$g6FY}QysU0_>WXv{cA5Wvmc*g)eu4=bbfOkOL1j)94_c9r* zA7=h+oxM%sHj&W#VSmqEv+vU&00ZLDo{q@RPR0ZWqzAp4?Zb1l=-H}Bu78+DLq;ZM z_SZWfz;&*%Toza>y{$)9zvnY3n0bleUX5#ELR=g^9Ft|{fZ37ANnIpxMuR{9ke?gJ%s?X`uAVLp@_(9FCN%-1 z&LWE%b(??sKZJRXPXZO(mQ()++=I(wtU)#``2YEn7+Zveg+*UiSGx6=q)WJdXYl;e zSd*n}mcrR(I-8>0F_S?o5-#^DR z7SW6M;C1y-3{z0_#4=Ny;GD8;_r)t-HqHd@YE)b}6fz@CG}K%?E~ zqD~c=ssz(H8nuqpdAM=5&b9WBabut%7HJ9lj~O|4h>mg`I26S8qF^na#`6_ z*dsr@b5Y3)3JTm?S*Algx7U^<6?yz0)+%EU$l;@Omis6EhG9Z%vXqj|2iIrFwJ{Ew zQ9|5lce*8%1t9Gr3HrX^frX+PD%`T8v~9&T^$FFatzgwOkN@Tf%haH?<41S*`l&nw 
zjKF?n@>p$oxx!96kCMwbFHK;s!q&quG%U*xRh+TkRT(Yjy;IJ(lBv|GN#-noJM#_R zyV3pNH8UgamjR*|NPUq?hA4?w_U;{yxo=C*pRUBR3m4lpHECY45wbp72|@2 z(F9j<(xqAvmd}ZS=??5vPamlA>z>v^P8nV&4R{}-DLae|r0K+J3X)937PnQ17-nPu2uIpN_q75#muS!D$YxXTIe1P->* zxy^pA$+G+TovUwcZ5NfT+;9qCOoqA9AK}KJ# zi?5dFbG8JL>SPS}LhpH8S!s@xg=);+Cp8{|w$^1)pTu~DWBr@zFEvzDCX4&OJ4 z>Q|&Rb!-PbbG?Xb)1zx?k58XFgDKSO^TjsqR=;;)C_}vmpuGX?(v7eO6x$|l-H6hth z%a3byt8v@0!sJT$9<`4`S&|KPPNY~XcinmY;~RpxQ~Tp?NP960!f!!|+|AMaVK0nq z^f$crL^12=rQ9+jdqt!3wbu+tM7x%$M00(jvANv0Vk)`<5}a}od&*kF75Q51u76eB z4YVaB)}u;s3I>ckjL80H_MX_)iczYjWE}QE`;kq3l91pDwnMask^7uSwmpHzmGi$3zrFdBG^l$9iiyD3aSC2#cc@?1tT z*#VI-K~d|aR_z#xTa}@7DM1F}bD_NgJn4gR{*F!)6wAHiX$vk*@>a21=-? z7PoJ5H7^Y&5gaVmM_3?NE?g!5YuV$dfTl(BlvA0XA|O7(j5c2?QN`~^3Ih8ceYYhG z8BYA-l1P_C>rQYgsyJf^q0#rf4x^AQ$0X|=)=)dSZF<*espHm zt1_`ezt^%@LD%zTe&VLN#kAzMzICl;$}6_razeGm_NYk1kM}#c(*wA)d3dwTN_%i} z_IxK+thw3o{$kBHGABKALHNzn!f;+$#_#o0jxIKigcz9LR(^A|Zyf`T0516lvV@q@ zgs{LtB^?$Obu6-$^`Ni)ax<#=z0R}Jv$`n+wDnVR=O+|&@y^#XT?$2|V1`82f5iq8 z%?K=hd(%;GODQOb`+d{-!=aY_v``j*i)e41=fqyY2kucJ?n^0u35+T*RqYDYN(=d| z1{i_+f-E1wdzBP)Sn!~-B=|om3QZ1D)T;-q zgh40Ikr2iuo3df9N>))CI_!TB!#SceK#^QUM-w+$b@RN1Yh4Sljs99PDsh?k8^dVunc->i9-ygTe{IbeZ z8GU~P{>iR*7XB9!#!Wdzi;lT=_S0$Gf|n>RdD^m`e|J8%!JFHa|3XwGd7*z$R=jP= z$3<~*w>wKu{VU<^?cCo!zn{M3qmXCSWT1Zi2@c)Sn_{b`bOiORinCs62J+o#X|mJ( zkxH^#6O*- zQLM?+&AIi3M&?#Ie@jpF9kSuK3)S~@OVcMCOm9g*?42{cDdIy>w-3>V!qrXA1q&0N zXw(J`UE=ds@3to_`vcA!_KUdg#rs)uGp}fMZuSNgzI)1G92pYPR`bZxhoi7!!G2`H zSa^XJoi!ZnIYG7-Qc;(*nrVQVouT;)mFB_c#rcmzVbHb8a+AMSytGZ_4MfWB9P8H~ zTtB=<=@b5J;dA}cpme|U3+}T1028Nhh7fC^0A6Kh3yUl%oYfmublwT@_$X>41A=jJo03e<&bBU7s6e@1P7`^0=+V(ogI z^NfIFq~&t?P>28_dC_2dsq;cLO8Zw&5i026U+bY;;B3dg;Wg(Sc6nJ3)hO)IeX~%@ zMybr=vv@*L5Kphc{-~w!tJrwc4YKp`nryO#YhJy9PjL2>MF}p*2bl0L**}=i&0?wY z#D0G1{Pps~mvgX1Fw80ol~YrZyv-_mJvXuAd6ZLYcI}NDDJb^sK;$JW=6lB`o{^4z z$1{xr1T3$Pj^t1)RCUsO{4qo8Jg&|S1*7G`Ob<`$PgPM&Wj)`m_#=&T{)8vgI)>m4 zVyN`YYmw8SwSW<{EWna$e!Oe(E^&4Krf%79`TEoO3-&1H-8y_{TVV(W_@Raop 
zz%BbH-L|xRBD{8n_pHC1I4wTKWiVD?ifY9<;Vh|^u)|OE5mPOSEjyr;13C0r~+LQ=kVqu4ka73H&nxDuv8{cafyo~nYi@?+m>5%gPo zLe+u73n?xtJ~iK;2ye+X`LwDpCQxkJ_7_2lKa(VzswFsM} zzEwEJ^zvUbycJwC)x!G?L4>FqZ~rAU}SJi+jfBzGlfn)jwgC^Llg>-9Wk%*UzYn%d z{Gy=Cwq)f!*ENPse^q#(48{OrRiYB!dAXr!=0puoU7|ffpx*cNK6RRpxZigDJV|)( zojZ7VG)x<7a{i>2(gGvXl8L}sR=@Rxin5*H!H@6x5S(mSn!DAk+3)ecabn{lxR60t zred&|?|x57Am@AnjzS~&0sV@$*Dp1?7-&0fuBC4881pD&=!9Gmk{O4v<0sgXcoVWseo66k#79b^(f<_gX32rv8(hyDlZGkq|(9k z{F>*pc*X+I#V3lhX|E`vhG7S!3||QynlEeO=AMP)Z!)WXr@4%2(9CD;n>+qyu9^RN zC6{3g92odS6nAv_?)NygT1XUJh8^opqPP}>riX7Th>tWrQClM;q0@bx${#1@E2|Sp z496E?RBfVpINz0TJ=0{_mn_-b4{CO6ldqn58SG4O{CPOpa&I$6)qagRqNj+^(iT=?=teb62TH;IN0@*;}dY79YSEeN1UHKEaN{lnNX@{c9>3*sD9J35CZM=AT_9%Zd#2MlAco%dZPIgPh?i}sG?Jyw$d%}&G_YxukUVHDaN{4K z?$k5jjJ&yDqFb?CJH+P-&ehRlN}DL$9%@a_;D~A%q?LWvO|$OFR_#J^XBIk?rI=Nx^SN}sBa#ew41$vt(UEc z6#dcoU2MCe_@Kq{ge=*IY<9aqHuoHymWYB+zGc*#Wv$YxH;QkKTFc6P=YUa!=Eu_% zrPyRTugfh$!nL_)ka`r!-+@sJ}<*t>@)?O1&CIXsT5R#12Usr z{E4Y;GM{n#%2cIHvgyZ;j*xe&_1gV)+fs>ycl{qPR0cI=Zd4e1JiJ}iT8|+AU^o4~ zddvLN+?g80c{xfc(w19{oqQ}QWRaY3{BJB+_m{EU3rtz|xJV z$=)ZKy7*?yFPmxj);IaKcBMT;y`861XiE!SNE%(5PtP0C6@sQXES`8UB69DI$tsc1eETid)9A{p9ryeL zr^(DxmhOymva4d_)MPdDLePou5Ip+|y3Aid+=9cX>pcO`boQ@y620rIt^3@GNi|I7 zuIb|rcvK~%#LH`?SM%%ZlwAXVz>l4EVa*+nwJEL=JJsleDO$ zPVrrzI9qGg%he<^wNg3QcS0yN4Yrp>i@wEstE)R8Qpg0OQqvJ+=0pL{iV){nc-bBJ zV}=6|XKMDsr?N^BgbeC!d91qtDF|Ur0Tj3O{bexl%~p8H%9rO*>i9~p%fhn_H&k;d zzQ*;`0zMOKYsEsN4`eznc>zFS0tW-LD5-c6n3gDny4>Y`-^tv!JhtBQLuGh@oK@@M~oZ@PjYq}<%Pn& zu}}!^0b=IcYsCb7XQWKZ^2C=)aRO*>Anm@-vyG=@AU~lMQor$;d|fbIJTE0a?r!s@ z!#y#MCa2RoKVC6$yyTpJpjy*R=QeH`J`3+omt3|bwIW*eSNj?G%i@Hx*ftVq`DW)=ub>URF%b%4 z7!ksj_6l=ch5b0W>`PQKZk5MRN~(F9=f?OK713)YPv`Qk+_LC@)cNYH)Cbzf<_LV2 zXBT)b$IOB`d-lfpoC`T38Yq}vQx{T&)@&U z2Wmtoxqij6zO-%o1CtyxY8&OA8s_`?Nq)H4q0p-L)S;$OUZ$(#L-$*F2!~Z>)j8NPF3tG^2UF|$7?5}i zf3=mo1+&S-3?&NprN?TPuMB+#*aPb)MH;CQBsoct>0=)hkH96oTM6wfWjKJEZZv%b zUDyt9+7fP5jP!KV*+y0W&N~Pib+PWRx`+cRVBi{`+*e*e=i4LP#bg3FyvX6w-v4Bt 
zQ#N6z$X15CeYBFeINwb!yJin+y^*o*!v$VOc87~Gu_13R8&eo7^OZW1U=#^?88k|V zbKiafB7^C&htfzP?tz*&WB2BaXoP4lPi25bMt&YB?c!Hgwb*D?ZmcyF;pcU{b9j@9 zXi@12^~Uw^n8Letks%!YHjk`BWFnv$*@&~vr0gf)8deip4JO|unx*XIcaOL!6mVdE zxHMM|>_%^YW@cw^f3p3<=k6?@(i-TpHxk{n-`%P{s;g!B+A>H16w2`u^b?+gaMH+l zotq^97HFBkx#|zbK6+Idn?W0~2$c$a5hg6n^ldqF{SFBn|5AyaFXHnnZAz@ueDCDxNG49!m;smWKyz?2nZ85rOdp&6U!J&JxkJaR2g= zG2@^$1GlRtxo>8qujB@I#ziDNCn8-l1X+KdHOoJY0AzieHwVv^AYSQFHFefDBTnh~ zE5>y|QClbtMc7Y#d_`VIGc(doV(!eF!R=9enIbk$*Rieu%18B{ZD^u^X3D8lijXj@ z8C_*3$*fz=A-g2p15m+lSC?<@dHuSm4P3aubvaHeGO~DEKbx1P3KW*^>5iQVF)Y|P z6P*CniwH#uV+nTBlmb#-kerFx`z3BbUA?YsZL3xv)UG1<0Xc2KFTLK{(b17}RnJ1e z>-DqRyY6NgM&mAnH?3PE>IoJ0bTpeNq(`H5FL#9kvk6;|1<3dU06nlZP>MfkzQ$D4 z-q`I`d`J)Pr!Wr5v8b`sT#6L1Zuaw7S{cSP_jPn6N@j95%7BKtK|^oz=wq)4?sbc% zD6;j|3$S*;OSo-Fo~wJn_RMDe5Z9p-h_D0@A!(zU7a-FF!izRQOQJd``$aC&&q`}$ zOtcWR|+cQ=RvbTBDLn>QshKYac~$eTs6gtEsU;s!b8(UNKf=SfZ(TKp~sa!O5= zUle=8zTItNk`{F|ACt zzqRbtk4C&=wccLs+|P-{^NWF(N(-gs(fr(6;j zhh>LN77I#zSyfdF+S1_@t806yUO=>+SY17(mh=Hts>-!QjV*kbjEWv;v;tnQlCM$@ z7;GqHdV`MfjwKHX@tLEqxw1lHo;u9}IObYYV5~KWgL#1mJHfHfwMooFuOL%JH~X}4=E)*@|C2XlH5IkF`JGBVp35FmY5 zuR>z7wNO9tHCXbKIOag~3|MlQFyp7~J7CEMk*)@p;b?>2VW0HVCA$v#_gqHH7=k5x zv_{w?;a9E%_nq{*EpXcs(Y^K{vhrP)lBYy5R*n4iVSFs6su6P7`yfc4(!EIY;s7+R zRHr770){ObT5f1PRq(#ebplX0od^CPp`jqag>lrH+T1J+n`F8u#pdYf7(t8C5xV`{ znMPfW@gV`6@`T`@3|=h(!T%zt`2&{)iW7e*N*m|zlNDxkXQ71>hG?S#Rb&lri*zA& zq?{VBlQ3!yw{067Pk@`tr*ad2r^Q(HziSBN%k&ceaRVz1);FmewO7R>lj2;5TgD{~ z!!C0dD*j$yJ|XHpZ~$JgzV|t;>{l>g)JJ52b4SI9(mTt5fmqTpIPX3*NUSwKK36-GPg5J2CT^mG?b11eTahF>0t1RkY0%l0z_ z_uES23n|Z6BX#?Axt0j~v7OC$^G4d(?B}g~SsEq%xdLztpwlv$QWT&%%>~RJx14Xo%mD3o5i;f!4S)&#vyOAud*xrf}a#i z^2Q@FsoZ!B7d#bLh961nzt>>C*%b9$P_X|7V<13hLS`P>R2cxJn4uWUn^4!rnZ*kC z(=EFtN^M_8qep>t>?<#2s<1I)ut7nXsm#kOKVDYiPI@kNdR#LW&w2bagoA+hqI{}9 zFgB&FbDnKEFv{Y0j<=2oh-j9FN8r>b!Ca)GKpYB)yH88lT_0%za33;azLz0MdlMGg}OVd?rdfH)HjpXIo|sh;`m>@5k* zpz?yjn(*;11TlSQWo-9%TlFm-srPxaGU^XzvHo-LtOo>5QKuEhP2i54;9N#8E!epL 
zD%wjHxZ@rDJ7<2oqq>+A0Ju`H^EP|fMy0H=nc3a7h?lNqu-R&Kz7}do)y$?HfV*9< zDldYN8i0FJ(pl060Df#+)pH}kt%7#;r^}KolPDl$fh#IiAI<^?PI7rI`rCmY zylw%fDhf^1-$JMZ1@X4Z&p&t z6dha>s3m7l1=^|v)X7ROI7*x0;bA$nq->shlT_lXqQ0m%JFp#s?K2)r$ zPKO}5&4r~2*LuFhd6l=AssmwLesL%A_-nNyd0@1qG6BmDXcso&fEzyF0O+yO_KMQE z07siI&c#*!UJMl`18mF$Y%G}WX_^j;qo?Rv4OuMCC9%!CcdJtV=nxLA!yz>qgd!zy zoamGe(g$RK6@+X2f&eQ>{1YpApEtBxXpG6aAMl677E908?f68GAaoO;lz(4~`8)55@^$83ca4U*Bd^zP1^r61w8 zV>>2at2vX5m2zT%D3YZRIt<&sv(+Mz*qkEw}&1kAfsT(iG^zse>jaTH_ zmsO)cpNidT|L5hC6Duy7*H5FfMlmT7H1|Lk2>+xD6ve(4h}5_5FqpuX5Yqgt0}R!4 ziQpWQssdHQ{(L8R3!K8MWVf zdH8+|%wG&o^wJ%ok#++Ej@n%1bK5VqP%bG(v6I5hfqA1?RO<2L07O{Cax$<$^H%ww zE1ea#YPAK*a{RXdA0FU*v_g#Fa~pbBtUm|ch0tan>6W;!UPS8Bh#n%~AeCPg5zk40 zgS=jrXFx#X53<9QtWJ#U+f48X9ENni3>@U9fLah4uC>t5vZIKw*n54~!74Z&$19UK zUI$fKkVV={O5kH!IVGbS(U376qqfTLbyy#=3xj?$E0(4^@sisWUZJfgr1vepqyE== zcz+!n!B4iL@Ld%#Z4^f2F@tuwrnTMTqCdz_fm)ndAqXh=A&&%gFLE3(Jom;H5`dE= z04LLE*&ly^D|A9huYB64bzO{{o1%WKRFT6b0g8Bop2^HRAYzNEqv!wum6g5aWI6S# zLQcUzSqSqUFeJis)+?8A?|jM0V8QzM^dIy;=lj%r$$9ayj2kfJc=bbSxCba>enz+; z$E}ZI77fK+^i58Mh~J5$18zkG+zOcxt{BC_TO1Q7&`$v3-J$8#bfz%;q0fvJHB|cXI=DG+sRsfm_ZH^<`Ha?Hx1QDQ7y_H)N z3X(OQUS<+M=)N?Ab0VmdWZhZEXa-_X3)F`9hPf%)F2j;xG!EQNQB`Is_0+aP zm2o?YRF#n-&e^CS$XN78^_=x-@ch8Nj04X7s-gMbw^_VaWx!uMixmiM`Cpma@g5^b zSL|ECq3OIVPBBwOPF9}lsbs+Cq90grF1!S8`?{&EItLsGJj27~aq4mHoR?18vem$0 zX7?g%8sM755CEQi0n}%bc@3~~b-tooj7EUX*4Oi7-hsL@2NqKcezVz^%(1}NZh?<7 z2(fB%++?&mW6Tj)!l>RvL39Db#AqA@m<0>5EFX!x9$KpP#dUUYVlrxNx%ae0>&!#B zfJ5KFOKvE#^T?&jVTOk*#cDsiXs1;_cyP-aBXg49bl4-#z=uJ!l$AM4vcS+L~%TO3~|x5vg!tQ1Jv`y-FZG{NW3ysGl6HZGU=h_YUfiFqLAj#jHNlL%GiPkmUrXR?+)3?NPjwp7putc87C>>E zvf9k>=Auruwc;C<)%iW&S=Ag~IKCi&r3)0^0HDqyz&z^CfaU(7Q}em4u|vN+!;5~L zIgO#@l|pDkk{uAl^6M+^s1fgJF55Z5uvQEE9>6`@pSOpEp8?=NHs1*x33b6R+i3R@hp7yJ}Q4UjJ&?kB`)qgk2>y zXFav{LWBU^9+-68x>!O9H0M;t$$CL^j!8=A%Kz}WLMjp81C;4*-UXTtFXeFbj+3?AV70V z7+2e>-AOsza=+EKf!cM&T7kWDzXsfrJ#~SAs!ggBvQ=nOKoF9)I7zT)V9or|a{x^` z>8b)2Nugz^JyR)Yqoz+ciW2h`82-61+pPvf1RUMC?h$;DXZw6)Q!Gmvg~}yAFF_(P 
ziL|OnF$|$|UDYC*@Lg!0SsKKHeW-7@u|Aqr(m1|anS=Y`gLQXX^qw8KXa-IP37gmz8 zVk|SOpV^F&-+sPPqfM>|X2xLNB7ZV((IksY@Jv_ix6>OmY^EUS@%gqJ)Ytg#XAE~x zG{;$}>D+)@ga!14Swda4Aq7l>=k-$u*HeF9ti}OzPT)3r3mQuo0c#%BOFP)JSlrnY zI@zHR@bJ9uFMa&dm{x@kMsK&4_xAF3^EFvb6%VU}_)S*>W@Z!(e{@@W*={X1EySLu zxAuPF8$JhP&3v>OCf?_^zu*6{DU_4Lody*Q&St$dWQ<&A^l{Y(BV{{4yzJ1N&hHW$ zGPP1Mr|?q@;hvl!2mtT{3zWlmW_9o~`s9Hl^L9YBBhbSO7%e!PrA80glvd@gH%}+u%^2KC5x&>f z?Xkk7Qby-T4BolgIUapi&-RZGaH`W!+&pNothBdNymhX2=GSOwv*aD{0)ogOAH}^FeJQjw zpV?et;^0&vd5f3CGxH`eOL_}%qJkx_1NWq2|J~gD?}|NCh7OZa3H=0l*wN!);=V4` zMW4$p@nfGSzxM}tWMNbFUkRG7Eg*gG*1fUOUQ5D0b*K~KLq9mVJwQX{grN+6?;ElQ z3}t*p-E09LqRKTeHvG*{uuS2UfT1u0NV;pPO2LNv?IxM6tC95@2I6`nisTfQHdRUUW@hn7qY`vhSPlo1v7jr>pp2&FF@YwnsbUo%l;bs;J&z6}67K%+nFL=< zz^MAeZ+qO04*AM+3O4gl$1~~L`~zil0Qj~b)h72TJ~mA`aJFmDtn={%GdNwEm=&V^ z@s#uJ7%T%E&^!sko}NB|^O&IQa_yG-R|ocg9TT4flBqxzskeR2ZJ7y~^T`dXORxuXW^|0mVSXTW#lZHZ;D$$prA@Y3vaKNsKbK@*lL`l zCN3D(5rKV3*#)CI)yZG8|S z7b6V&J}3Bd%cfFg?hLgarxPH8dccuvtq9tt(`0(|(8bk_2d>7{OG8%iwCZ<)0U1iO zFX%i|dw<)WMF2u2)?WN(xipX)zsE)gmjXkrouRzGR&(J0?S-*+!(gbjWQDRm1$aGm zV?scI15bupZ77Rm0%lxDGT-7>|9xfA4=k95n_Sw-qNzS!-@?TGg>j^{{N&ysaE3|N zP@tHvq{@^k0va`NXk2bnfq2E0^40iHHJW!_od#%B8EBN(ntwwTXw=2D5S&{wlQ0Yn z2;o@J)$#We*x$yZ?!0rS!rRO79Yr-EY#)@@%QobS8g{_*%SWYkz2NA(#4qZ9Hwf-V z=Mus}mIVz07*#A2390e01>(v^G#`t9_}W$p_%aHj8rM%>7)Hz3?wz|R-f|8e21+)O z2{gr|IM}%xYQ!ZFA96)mZik-keAIwQ5!~GGK>#=ji_*J#BISmSn3Cr>ICC^Fv}Zvt zX(`6nqMb@;3yaef{4MF=z8sbs6ca|e5uQ6tfDmW0copNpuoJ|E%8jQm%SEK}8k93` z0G!5xTCwUFaD39HKqq<<;$}SQDFrt*Dh(ECu2voXYN|h)EgaT4B-zIo0f%pi`{I zKqPnym~~;8++5+1@`C;j0h~~3FZY*xEC%U@$A_2LJ(S+q0%E)f$W!Ry6yvch76)kI z{Dp8%cW}mIGf&hjucJ2k$9D@GT;`!{H)o&8k@xDn2$B{9FyM7F;s1Z_y?H#;U;95k z!zfar6xmBcSra3B3ldpU_AQa@d)6_uXt9>Eh9YDqTecZ0WY4}E*^NE>Ff+e1w%+&s zc|X4Y{~nLqU*k3NI?Huk=Q_`8IoDaZGgLgG%4Gd$?A=mOO}Z1QW#xG1RjC3OfQpBO zXd~xTv#wUKf=1nXU%k<~2JEz;N2;12ea-SMOIh`{t?|V5<4-VJ5Vya+n*2%Uh~JZg z#==@}Px&_TZ4I{AoKd9QiZT?(hCtBw*W1F|f6xGiv(HA#^Xrhkf0ppf@~7ayzu-hK 
zKM-0KJ?A;181(#tGZE2GsBJWYa-lYGGd}GsA8^v8+-G1%K6KoW}zC>}5HC@h4 zuwv52KL}A?l4er5@#4sxrGrm^dvw+g`zsk8_+P5sS7YdEf!CT>OcS5}qqsEOF_Y@I z$?O5`2hZk66~;t%Y*fG4=_3<6y3kPt1)^jeg~nB^z{{m)_!#kgbMqZ2 zLty9hUe{NH4pj;gGKOwjYjmCmPLuRsbyHZN>|(>5wYTF7^9EHsz`8U8sU5GWIA7km z15Pi?TaXQUSRm!On(o91j|Iwe1K4CmvP7!&lh@`%*(Ny13c=Fs9=!TC+X_I=v;uc0 z1Qq05pEv@r8wAHwE-3w`llx!|R#Mi~S2{mAxd&u7X&qdaFLFUE)9eFz2Z%6()ofzH z;y5=kiwz=GTQ3(V&XP~c_QZ~VbT|P-UMr{NcpAv|gjTP7-)35=ky>2{glqt;cbAE3 zL(1zEXDPql`#wDRvChXrWePRb%)}-(=sm}royvf8|KT3|hKTnf^23@?* zpt6ei#uPb7XvBs^3FKg^QVS)!5X02d1FpkCd$+Lk(QxLpDng=q^3g` zff(R!h^2f>%5ym;)pI74LoQy$H+2YDV{u}hv@2hv>sU}o^;Gt{>YGaG>DVYi-^$v#qG?GUu7zEy|Y=#2P@Pb_F zq;r<1hYEp0=$loX!XF0;p(}Zy^997D$SE9Sv8@osIzG6AB^Cyjm}<`106BROfji&3 zaEBQfI3s=}1LrM3mV9)WFBw!2y-p6@6E)kgFXImg zuvZ{3#nTl1p7sVOm|^0bAZwCc(X`DBum7|wpfHJJCMr`e(x-+=rPIC;o*#j-lbo#& z<*`Fy0AHna&SVOZ%3bBH?u6JfGa(efQa1o*veOusBPlsNvQu?tkt0O$I9|dwz5Vk!{(^dMul5pqYqg1O?tcDo3gfsNVpfD%k1ONH# z-F4{`p(YLMYD5hLVvVNbINDty_B~QHF_NqSw>QWD=OKXkRB@H<;q;VNhA3M*6d*<2vl6R%@IXW}E%#-gkwwT*U z)4m}#B{;U|2EH-QSR=l4(#my7)}sj5V2?C7VpRB3F*w}SRK>Ek?a!$4+Mu-oK{5}C ztlt!W>6=MX5rv1x($av6NaM#C*pvA-o-<-OT{jPW_Ea#Z<6!zh;ZLRM)?*OF4}?(+ z!VteEi&I{)5Al)B)grG6PNR_$%~HT>Z>?`$!#1IjxA8RbjJMImY?*NPkPvP_4C{{j zUaOZL5T|$za%@iJ5nbq4V?nB;LqZgf*)xC*Pgu=}-Q|LGANbkPc74Op_Htd2oG@@7 zR*8oN#E>B`t#;ic6CtivL`bAnWN$ec;On}oJ20vQ>VokW?v7+xRQbjF$EP$Ua2l2d z-h*$ru7uh@a{A)Pw{=lmg2`v>I5Y)F&3h|>L;6sFAt;zGo1gHYx^a`IOCXE~jRL_; zpCjNjusTR9eqog_QZQp6_ZVrvH6|w7dT&9^>5UH;(g)9NtjSKGL8i?S!rGdhrpxH( z1}Y<44P$DT@G{~uS%Z$7v$-%cB+zHM_rX5<|KKj@FYI07FA(<11Iw!U>Di%=6=-5z zQAe#IQ5WAEg6`Cu9i8u6lOZ~IL zZww`Bp^|hwByaNQ0S+(NC9pRgxc%FZq&QIjVjpF_ulUhc(~j}BEyDe{CMlA>Swpy& z?YUJ-yoFzQopMpRt@A0PW@|y9WvadV1gO6Q^Q)L9%y~&|3SR%+2Et$D+%Z8{z4*X3 z|J6}4g9%!3w9=rse_KUsW3OEX={5Q!-6yToi0~s$H>0v-7BdfuWdG!**?QLN-mHC5 zt*a1rN>i^0q_g1`UY)>xth}=F=i{}P4E3!T;GKFoaoNw88px$U{xS0mkT>w5ZNDgfjXLLnGlfejx^#;4>zjeb#V%Yw?G$xoP5(>vRE`n$3n^ z^!gT@x%LLf0;jA|U$?z?RH=F@JS2WF$j9EGf`OYtRfShna>}fwl;368-N9YHNXdyE 
zM5#DJyScq?gy|2^M>?mWigjIX4%{B9U>UKT+9Dk>l4~V`IMR?s;w!Q~pDp2)Zlp}n zgawhS2jR95p9Y^VQ^B`n6U92D$2sTv8Lw02Ep*f~NtaA>PO zlexp*XO^h(#Q+{MTaed$m$;ctWXEoz*U+P{r7ZRM7KUYcLs&V zZ*C2)`U^fu2arV@{deQ8!t*i`VSrb++PczW1P!`v&W`D0GACEV2w{%oBa z9mZv!;s#ekJ;!D7LRu3wXqRFpTsp?aY9tS9>oI^P7DrzePtDTV{FuBnNKpB@D)L_2 zqq$rG>`C3thk!eblpyF2cM!Sv`Pa#ub0TEEotNYbDU@b@!IzTbZIYCZKouztH&Y!U z)Ks=2g0)_la{el>xJ4X*cZ|pyBpe84ot1fg(yeahTL^MedZpk4vU~V^jT(q;S4LhG z$GTTV2N1Ep4OfOR@uIQrj$6Dd-+nW9NZO&xr}`h`^pOnxEiRG3drvwy`1TC#7OUaS zCNsTc2(;qj%2Gp&t`vxdi;>MyGgec^QT_JGyZvMi=ChJGuZ`Slr1>0z|2{Z(?4ffP zfd(IqTb3=&YP=g9pY<#93=~CjfUl-TB`ZjJ&X-q1Oxm8Y?9}P{z~gra&uU1Yn!W+f zixrIZz60INJ{1pSVw(Y~Jp&t4ySCtYU-oe`plq5g!-uQLeHN$5<;oXIprrm?6AY)Dn zCs{%-8*L*RbemBR>34iC(GM^U2>d7s&G%&L$JDLjn7KPVI9K4vhx)+Nu~NC{=t`&|6hHDe zBnj?z``#eWf2;_~wev5CYEWo-ub+2aJW}7DIB)PqSINg`(W|3nnvjZX?3)(!G0D># z0@ljo4;YdmUB`Rn3L?zJE>lZ0BFm8Z{Tr^zW9#}Q_Qpc`p(fb^)iOKFlCqmaXyOhc z?<2?C{Zdyv`a^J^iRKP&6pmtLZ~{4~sWIA!MiO3@c66cs9Bd#=D}9NKG4IK4*b=dY z&kWicv|d;R@@E6gwopTTIT+2}Zyw$7>j)ZOd`v zoAy&Bkc&%dkCDcNyaL$M60L^0>YiF9E|IOH**YJOHbf8Lw*p_$NDgz|&Bj{JD)6N%Vk zNChu6?#ApLXAPa*Cp=e&)ZpH?XEzDAMYT$=map^%SXG4p?Hn(eaI)N!FT5OvA1(Gn zA_s2W99-S3iN`<1MO+wUFZGXgf*E;Eo=!SQ{B$;(MaG8j7uO1d*L8B=KQ?H$Ox^ni z1Z$GZc6)XlzP?qUs8aq{N1na9@^1wf)?^V{h?QyC+sGB$C1*tnyEUzjt48+3zgUQ-fqqbuM)b?d9 zj5A)dxix`Yfd>v-*GC+*hlIP_%A*LPasW9kyY&AIlR{+ZbbZ#!s&8oM4}j ztstA!Ly3%WW~2Vo4Cj(KCzhk0dPa>g{jCn{_f>AIsBHc6>aJSb=1Qn9=6sEN@QN+C0Z@Uo@OaHir!|m{>mkfpp%+mHHMosl1xU(O)q3ERu0bB2o!GBay^&2FW zm$JKYXm63o@sb_W31hH*8UtH53u7?$29Y!=md(T2FWKJe`X*`|((r+yT3ny| zSWwxq1yISlwSJ$~9HH5O0^SIvy@HBT$3x-$;Pf&!Z*mzD(eTP^}Z1mbH zS7^5dB_XQr=k}TQZNB==P>{;7^>8b{+M9=OJQ-$oK7?Z8o8I8^Or8~CxQaxZDO}8P z9|}p}HyRn7j3~=ymswP36M=9MN#Vx|^L)NPc;MkHLuf1htq_`X2dLitf*4?BNm7_S z(06%cq*GV)z~Xy$Zu6(<`wa4r-eg7eJWXaWI%^o8KUz5dCmMu@8)m)Un~UQO?`4?tz9oa!=U{xTTq22@2!o7P+UT~TGB7owm}V)`;8X8AHRH995E z=Vt0tuCnCmO*5+8dp)yT^%3plzA2cx3zgp|-xx}>Qi=O3zY8M^#YuxO@LpAsZf+2r 
zLWzC6zDt;)*K)0g%RikzXBba^<1J*cfq=hNTkbjfLb$@cvCtNK9dR%pXwx*D1LAFY0P2s_Oy?oH6kv0q_DD{)=~ilC+B3S%9`xEcm<+InxU9HD0`^nJt3 z#P`+AuIAtV0Z2i#@K8UPwojUWehDF?0<~9;n~48Q4S29~(10%yzzOrO@5(&}5)WfM z`j_bcXnE)f$ZKWSdoA&gXMbA(PA}k^^9SiWf1mAtS|^pQd9L;8=)Zn@7L>?S@%;^) zj{owm4ES>SCjEX7o{`iMNGo<$?I>D~aOT=u zOTYLQfYY=+F65L5B_5NGJci{%8 zLH8G`@hA_qPiGf3;DANb-q6>4pk2^=X??N#V;&A?y7?cKqUCljQW&w@LrT<>Ack7+ z9-l2pA!ds!Z-89lb^K>#$~gcX9@*E#?kb>#LSGs3oI>wnY^fB1Pd19;v!LX=z7?;& zMvy@K{>~qR%EcJF+h9|siMzK}9x%@1kv!1cnW^bSSb?h++A{MtbGr4n+~D+Pi~GcD zsl=v~caqETvS3$jJPBU@3A^~z64VyrsLD6LOQU35N2~YLBOs(*xP-MYFD13?_l6K_ zv0AJ0c{g|If7Je(qg*&?LJFwuY-1*w6{FH?pA**x`*`AE5JA*Z69-3^xDN45O~hOu zgY>6mFa2@~oyWJ8?3shlNr+3NWp(GA_WZnvuX=K@<9pFy+Gx*H%R=iSd+mt8z)Obb zub@FH9&?TT`$+YWOe0pwbKHW4rDE>KokqUlBKWYUD{E`8eiBq?3wskblpSu<96C49 zm&W>L?zi}!toi!JlFX7kLmrS1LlP!cs>!_dPYVqAbc`16wh2|_R32o-YsKp-Wt3wC zxOAkezTi(yz7j2;?Sk4|#Plr@=*lIwZ1m(=CfZA&@Zy)x&qwj@(vTj3^@uzEmukX# zPdAnuJlWqPO(MXCL`v1)5!|6qz62IoD)m?pisTYmT)y?7_E`*HM+b(CC5~EljETxc z8l-Ho9g;Ekk8Wt%99nl@=z$B-N(y$xkeRhq#KMZr{0+qKtIMpojC!^$QLjLHNM`e& zF1Ifk&v`@7o1^co&G|oXS1tq~I4I#g+Qo9jt5lHM|9PL`H;=!vuLv8zd|UO^-d=u^ z`fTHHn6B@|5q>6@uv5N&_)0(s{=k*1c>W4cQLa#TaZqSs>DX2&VN;fJE&`vIU24R? z8KjoVZsg?1m(2~gwbJvsuu*q$$PI%iacM+??p(R6HAVCk94~De`c3$b@lx zm(vShai9A5+|>MaQpfK%7F3svZm+#IuV=7^>QEi>_q*buW#SSBNoY5$tNn{|hax~x z8@!d8)ed0+YQ`O|$e~rrgP7~uew|uVc=F6C2-dQkRB$Zg)q%9p(yiAY005LrM1h+D z9vq2|o96DiDS^9;==l=&;HzLI+FTZ|OWXfeE)mvuR)@Fm4&t0q_zLCHTQ%Nz7{M%{ zq#Y~5GNu-Ni!8UrtF8aL=-*x64(#i&p_Ppnyiqs&_dpb@z^;Z;63pGwd#7Z*d`1- z%x64#>p!Mrx`cvaRf^l}cpuf7{i8T-#Z_phyG}O`J=G3|>w@V4rujxnrK=5=F%OuX z{Lm0?eZu{Xu5GehA18c(zRcu$lTyV32>H8e85_@dxV?#ZAHm>$w()V8vwa77;4!nq zB~eid+)kN(Z@H%k-^g&aK+#vh8=Jo6-g6V^jDu!bOKsGqhW< zpX-p+%UURHfY8r>f;@0%{pb1zRlHrY8+n9x)>bFg)5^RY6Hf<~@b7iEyC-k!QR$Tb zX(TcYnr^){X}|EC1|A!b+~#tz3L1srX+GErt3yQlc^}i+&vTN#A(g(l&=~q- zfzT1MO3E`&-n0iZ)pCR$yK_<@(=0D9Pnd^V&emN0Oj1Tg9DbeU~pyKmBn!nimf?=#j? 
z;oDD!MCfiPLdH9116Ibe2FgpH_ujrJ!?%jup!!@5R#MmI38LK6AYZ$L+e8%in$fZzQJ^3D* zCUGxSNy?OFXVtxKb8=uXU+QVZ!z0@~nfxRtLFS;to#nH&cu!WlbVH1quXH2a>WA@? zp3RW34S5h{nW==$%%47~)~O%G62p3S=WX-i`qwUfRjCimV{bu4_DW9PAn(q{I2wzD zN^OqNoNrE6wCfta((D|&!*>ojlY7*WWMw&+kWM-dIbVpMD&;3m$U#ww{OZ){m_R3qSIRSIab%nyIm*ZZ56Sx+MRjgR6kAOLp?K;?}NWc_E1(lobSeI9Pp%T zeRh}eu=5KGF~v=J$>FaqHNiIM>4i&24u)XHklP)Qh*QF2a>#)J6D@Xpn=TaRLW3bH z-*fYebF~ZuvGuIQIp-CQR@)Ut47eH3h5L1x-VYq7Yc3yu$sHYmLn-zpHq>^WSFx7@ z9hYpm5;y8Euift{admaQCVQVAV~$)~>$!MOBqOBQFd*+lF@S#COy|fMqEIEq{Lxv+ zY?s@azDebw>*8)r3}~IQB70-eR=&rH0ciZJX^(5y5!<&^VnEW(Vg*FP6v=Sl;Vk$=L5-+vig z7T>J95bzGOVYfK-@mf`GqGAB&K^iUjVH9OmCtdgxCD|y9W`gqiuk!XR~<9ykY z*}}Lq(;voLY|VjaYPh{X6TVK{vu;#1L2v@&r0m$znVC1ID8=2oyf}|EzAhPk;2xu< z59g1-Sfq8Sfque~#iPNfjR(yxL!UQ8&_w)jAhp2HfpMlCBFOp5)xb`&UI+;;&~@J7 zh#!Rj?MD;o0XLcG-VF2xLMBcW+|siANN{> zR8VUOE(OTbUN3teOdf-`j&C`+2JJoh`x{@#FZ6%^p7iZ;KA@w9BGRq*wbtJcFkiwE zlA!)qYmp{U=nxETV95D@zT)73EZhJ0?@6K#qo#y#ua9B(Df5pup&yXse>){9?(G)N z>BvFED+RT?_MiOyKncM!|KEXe2vw4iRZ#g9mwH^3U z*;}Rmc#%wwc;IiRdjq3X$?=6aJT}hC+SWDy_&t~fEd&SuzhsdEtU0OXfN|O5jF2PQ zJbL-|Vs(~bRc?sP`6j*c5}gurPevf!TF!cIt0G5fUVOPQxy6bO4E5 zoNGqPU)C{OR*9o0|33gN*KRtu>JCqzI`VORs_fP(Orfxh~>UQqwtD36zb_Nh8 zwf|up2=Sgyk>xi<$SHdH8LWE(tsuX3d2b>x%BQ|`z691?@o*-!F_k|Q86EBac;(40 z@+_`iWE{%dW3i=1u@5tObe}h=bV6hfRz?NEyCgmSGYDc(W-7nxccI8$(8syUP#TSJ z8Io)ESUM|~Exz@Hoqm3{Pdn&L#`kZO^dqN)orJI~`KyNQD|OdsPCSqEc1rG5cAMdm zL7Ue%cq}RBXBNjSMjvFKVUF$25bVwIT${6c62v&R-sd$BCi5NlBK2!a^=FH>oLC>P z<~XZQW-70PedJlSrM4UjL*mPpR2@4{l@pa1nvt`w71J7J6A%n`gZ)L+3*IZI2fM7t z^5=5I|I{s2y)Ft~D~Wsap6Rd`gQf0Y{F8Tl5}#ew)A3tiXU-;d6i%EF&ayhzViO@Z z{<)ykO1ht8j|wxh0n;>1A;0XhynDG@U=3-ppyjObfkCS7a>O(fvxMB+6B2xsnOJoy&I&TM=}ybn2@DgFEY77IQQ?MB3>4yPfz2fmK1 zcL9zK`5pA^W#A%N+i_kKi8PCBP6rl;lC|?rw*+6APta|0+viFT01{GOl5X2ffs%2< zoKCk#F&MsGdHLFV%CQsMF_m;)ebe>Si;gSC#(tw$Vb5}_P3CTpS6}&UHaEz4FM`by zu{d+3QX4VCh*JLKWmtUEM5?`#iW?aMX^~%6|epdGAvcK z3N5U>yr)VkQt4&|7;!*H!$Jko7<@^xSCYw5T|BDrLX-3F2TY$uQ?|yjpR=nbzcEflu+haGIVAD? 
zmTj=7f-d<&K9_Q<;%aPj5=~1AY2l|doq!MOWNxe{S;S2-BN!=03>Bgx<7W1iZKN?! z{#&@T9pt5wRMEqZR4?0DZoB(}fpg@{@+LsF@`01eAv(_@^exQ1m4f^{t{RE-nee5{^6uwCl0?E5L6R|5zt zeU6~C(Pkf7Rq!q%z%v20&YbUWWOt+jRi|eVd5isIgy!2A#ix1J#0}##F+qSUXO=+$ zu=2Sci(5q7ZyS4{G?E*PTB_D>Ox%!h|c$E3aBca`q=HJ>2X^0<;A?&YdBOwoag;FI1w=`OEzKbUv%zZ1%vsf zRQG6Z>ye5dhrE{;TdbI@VvZW+SZa!<@gE^31g{x;re>$zb;As}Wuih4C_CaCE?&1` zn|e|17y=2Ty{Nzikg#)rOU;OZQ1mMA)qF~NG;TOxiu09*0#EYC`*+?%Hzd(axy+0U z)FF<@IG!%IP`})@4-Ur#z~*22k!fcLcc`z6zw6_<&v?`3h4AE0+hb0>{zsm?Vwf=Z zsefkGc&IlJ!eSm&e(zpq4kqDNabcWpush32r1d6t;;{8Z4rZ9J%=%;VR+oe~KDP#| zcsJfr?P<+lb+^Gl3u!{|e!R%nIW7e66?Ee_ zxxQr;^VkKZS=Zp%;UqEWb5QqqcVn(i#gi2Aisnrs$61O+EI4 zRseoPRG~+H-4$`_s9~v>8%7W5_jZnjxyz~T?uh5yixh01VMjsJpc5t}An(aHTFIR>?7y_ZDQM!KRj;5Q( z#haV0IfB?5m?{Gt?MnITPtHZh){!?}bG#^Z|7VNy)%*4$kjSlwu2jcW`fX(>)Iye+ z1$CbaW|dc=vSmYqOAf8p`f+*!DqH#eyhV&T0!B<63euJ@#H~yss0Xp4tNs*PmUvfV zr+AXpjSTrA@Ke8Vc|fLbHR+zQan;zf;^ouHYhsEURpSB{nMQ^zo_}hw8jCUs@wvP= znX1QohxgH8g*!m9BP$^1d1olshsq@9`CgrcWWWPIca;aMjJ9%$g|35Y8$tsHXXXs^ zv2Wr)Jt6+iQ-MO6J`W7n$C&Ltqno^e&om-R>@Yr z09v2p@MYd#*x<(I!fKqfbb$coYp$m;L(S%+eXQp_4vc8zc(xq*&LS~8^y#Di@TE3+gR4_W&U64M3((zh&c9B-ut;G;uKEHBHu(2TNRI>a)F0YcF zwv@wLK&GHL9q(8o#h$E=>l zdR`?v~aOx+Vnq&M?}r33s@TxuN>X?jS{ute@0f%>Zw67*X4f`Vs zt-%6hMZ_rTC~U6z%vx7v+m{#M(l_bG>H3di)$-Ud`zogzNIXkOqTUWQ+Yep$6~!|7 z`q688IJX(aoHl`(7N=Iw@v&~TgrvQ8C{~|NQ)HiI#xpbrPURQlhKrWJnzSXyDX%d3 zN;8Co-Q$FWCCG!p`N*OV%eTo1WGS+@ec)rga{1<_ldB@W@je)sDADQkgFM3c8l@d4 zt%{Tkc(OP0Pp!z;LCQ&n~!Cpnb8xtnXMGv(}8$B}zG4%lrqQ;80E9iSY?=g%+tz zU`|CR5>A@J8Ln94#2`<6xIGj?wzZD-VjV{<#Zue*#5 zbHD4sG%@M?eqSRD0MBnFTbq3!KI3c}fYV#ci^7_r4MsH7RPnO9! 
zYY%@P)AJ3eS6=`oRMnRl`&2UY_q*jz{k7hL}HDd}Cmqg0d-$>lGX-u=t)!8r#LePM8C=igp- zXXkQ54$}QGA}RI1+as-uLzIxdSG;QfGUsb#WEDrt`EGIjgGjsm{?Y<1{*Pt*l>Q&f m{!<|Tx61x=J@p>il3ib$t>dA1U Installation <../install/index> - AIMET User Guide <../ug/index> + AIMET Optimization Guide <../opt-guide/index> + Quantization Simulation Guide <../quantsim/index> + AIMET Feature Guide <../featureguide/index> Examples <../examples/index> - API Reference <../api/index> + API Reference <../apiref/index> Release Notes <../rn/index> -AI Model Efficiency Toolkit (AIMET) is a software toolkit for quantizing and compresing models. +AI Model Efficiency Toolkit (AIMET) is a software toolkit for quantizing and compressing models. The goal of optimizing a model is to enable its use on an edge device such as a mobile phone or laptop. @@ -23,43 +25,45 @@ AIMET uses post-training and fine tuning techniques to optimize trained models i AIMET supports PyTorch, TensorFlow, and Keras models, and ONNX models with limited functionality. -.. grid:: 1 +Quick Start +=========== + +To install and get started as quickly as possibly using AIMET with PyTorch, see the :doc:`Quick Start guide <../install/quick-start>`. + +Installation +============= + +For other install options, including for TensorFlow and ONNX platforms or to run AIMET in a Docker container, see :doc:`Installation <../install/index>`. - .. grid-item-card:: Quick Start - :link: install-quick-start - :link-type: ref +Optimization Guide +================== - To install and get started as quickly as possibly using AIMET with PyTorch, see the **Quick Start guide**. +For a high-level explanation of how to use AIMET to optimize a model, see the :doc:`Optimization User Guide <../opt-guide/index>`. - .. grid-item-card:: Installation - :link: install-index - :link-type: ref +Feature Guide +============= - For other install options, including for TensorFlow and ONNX platforms or to run AIMET in a Docker container, see Installation. 
+For instructions on applying individual AIMET features, see the :doc:`Features User Guide <../featureguide/index>`. - .. grid-item-card:: User Guide - :link: ug-index - :link-type: ref +Quantization Simulation Guide +============================= - For a technical overview of AIMET, and to see step by step how to apply AIMET techniques, see the AIMET User Guide. +Quantization simulation (QuantSim) provides an approximation of a quantized model by inserting quantization operations in a trained model. QuantSim enables application of optimization techniques to a model and testing of the resulting model before the model is exported. - .. grid-item-card:: Examples - :link: examples-index - :link-type: ref +Examples +======== - To view end-to-end examples of model quantization and compression, and to download the examples in Jupyter notebook format, see Examples. +To view end-to-end examples of model quantization and compression, and to download the examples in Jupyter notebook format, see :doc:`Examples <../examples/index>`. - .. grid-item-card:: API - :link: api-index - :link-type: ref +API Reference +============= - For a detailed look at the AIMET API, see the API Reference. +For a detailed look at the AIMET API, see the :doc:`API Reference <../apiref/index>`. - .. grid-item-card:: Release Notes - :link: rn-index - :link-type: ref +Release Notes +============= - For information specific to this release, see :ref:`Release Notes <../rn/index>`. +For information specific to this release, see :doc:`Release Notes <../rn/index>`. | |project| is a product of |author| diff --git a/Docs/beta/opt-guide/compression/index.rst b/Docs/beta/opt-guide/compression/index.rst new file mode 100644 index 00000000000..ababf1736eb --- /dev/null +++ b/Docs/beta/opt-guide/compression/index.rst @@ -0,0 +1,5 @@ +.. 
_opt-guide-compression: + +######### +Model Compression +######### diff --git a/Docs/beta/opt-guide/index.rst b/Docs/beta/opt-guide/index.rst new file mode 100644 index 00000000000..67471a25bb1 --- /dev/null +++ b/Docs/beta/opt-guide/index.rst @@ -0,0 +1,21 @@ +.. _opt-guide-index: + +################## +Model Optimization +################## + +.. toctree:: + :hidden: + :depth: 1 + + Overview + Quantization + Compression + +This user guide is organized into the following sections: + +:ref:`Overview ` is a general discussion of how AIMET optimizes models. + +:ref:`Quantization ` describes how AIMET applies quantization techniques. + +:ref:`Compression ` describes how AIMET applies compression techniques. \ No newline at end of file diff --git a/Docs/beta/opt-guide/overview.rst b/Docs/beta/opt-guide/overview.rst new file mode 100644 index 00000000000..b0e676a1570 --- /dev/null +++ b/Docs/beta/opt-guide/overview.rst @@ -0,0 +1,5 @@ +.. _opt-guide-overview: + +######### +Optimization overview +######### \ No newline at end of file diff --git a/Docs/beta/opt-guide/overview/index.rst b/Docs/beta/opt-guide/overview/index.rst new file mode 100644 index 00000000000..179bd1d83a8 --- /dev/null +++ b/Docs/beta/opt-guide/overview/index.rst @@ -0,0 +1,5 @@ +.. _opt-guide-overview: + +############################### +Optimization technical overview +############################### diff --git a/Docs/beta/opt-guide/quantization/index.rst b/Docs/beta/opt-guide/quantization/index.rst new file mode 100644 index 00000000000..d334459780d --- /dev/null +++ b/Docs/beta/opt-guide/quantization/index.rst @@ -0,0 +1,5 @@ +.. _opt-guide-quantization: + +######### +Model Quantization +######### diff --git a/Docs/beta/quantsim/index.rst b/Docs/beta/quantsim/index.rst new file mode 100644 index 00000000000..59b3b817db8 --- /dev/null +++ b/Docs/beta/quantsim/index.rst @@ -0,0 +1,5 @@ +.. 
_quantsim-index: + +############################# +Quantization Simulation Guide +############################# \ No newline at end of file diff --git a/Docs/beta/snippets/torch/apply_adaround.py b/Docs/beta/snippets/torch/apply_adaround.py new file mode 100644 index 00000000000..c09013436c5 --- /dev/null +++ b/Docs/beta/snippets/torch/apply_adaround.py @@ -0,0 +1,22 @@ +# Apply AdaRound +from aimet_common.defs import QuantScheme +from aimet_torch.v1.quantsim import QuantizationSimModel +from aimet_torch.v1.adaround.adaround_weight import Adaround, AdaroundParameters + +params = AdaroundParameters(data_loader=data_loader, num_batches=4, default_num_iterations=32, + default_reg_param=0.01, default_beta_range=(20, 2)) + +input_shape = +dummy_input = torch.randn(input_shape) + +# Returns model with adarounded weights and their corresponding encodings +adarounded_model = Adaround.apply_adaround(, dummy_input, params, path='./', + filename_prefix='', default_param_bw=, + default_quant_scheme=, + default_config_file=None) + +# where +# is the prepared PyTorch model +# is user-defined +# is the bit width to use +# is a selected AIMET quantization scheme diff --git a/Docs/beta/snippets/torch/create_quantizationsimmodel.py b/Docs/beta/snippets/torch/create_quantizationsimmodel.py new file mode 100644 index 00000000000..a400476649f --- /dev/null +++ b/Docs/beta/snippets/torch/create_quantizationsimmodel.py @@ -0,0 +1,13 @@ +from aimet_common.defs import QuantScheme +from aimet_torch.v1.quantsim import QuantizationSimModel +from aimet_torch.v1.adaround.adaround_weight import Adaround, AdaroundParameters + +# Create Quantization Simulation using an adarounded_model +sim = QuantizationSimModel(, quant_scheme=, default_param_bw=, + default_output_bw=, dummy_input=) + +# where +# is a model to which AIMET AdaRound has been applied +# is a selected AIMET quantization scheme +# and are the bit widths of the quantized model +# is any data that conforms to the model input shape. 
It is not used. \ No newline at end of file diff --git a/Docs/beta/snippets/torch/evaluate.py b/Docs/beta/snippets/torch/evaluate.py new file mode 100644 index 00000000000..55d8923aa28 --- /dev/null +++ b/Docs/beta/snippets/torch/evaluate.py @@ -0,0 +1,3 @@ + # Determine simulated accuracy + accuracy = ImageNetDataPipeline.evaluate(sim.model, use_cuda) + print(accuracy) \ No newline at end of file diff --git a/Docs/beta/snippets/torch/export.py b/Docs/beta/snippets/torch/export.py new file mode 100644 index 00000000000..e36a1a3a3cb --- /dev/null +++ b/Docs/beta/snippets/torch/export.py @@ -0,0 +1,4 @@ + # Export the model + # Export the model which saves pytorch model without any simulation nodes and saves encodings file for both + # activations and parameters in JSON format + model.export(path='./', filename_prefix='', dummy_input=dummy_input.cpu()) diff --git a/Docs/beta/snippets/torch/pass_calibration_data.py b/Docs/beta/snippets/torch/pass_calibration_data.py new file mode 100644 index 00000000000..cb998a2c7f6 --- /dev/null +++ b/Docs/beta/snippets/torch/pass_calibration_data.py @@ -0,0 +1,35 @@ +# PyTorch imports + +import torch +import torch.cuda + +# End of PyTorch imports + +def pass_calibration_data(sim_model): + """ + The User of the QuantizationSimModel API is expected to write this function based on their data set. + This is not a working function and is provided only as a guideline. + + :param sim_model: + :return: + """ + + # User action required + # For computing the activation encodings, around 1000 unlabelled data samples are required. 
+ # Edit the following 2 lines based on your batch size so that + # batch_size * max_batch_counter = 1024 + batch_size = 64 + max_batch_counter = 16 + + sim_model.eval() + + current_batch_counter = 0 + with torch.no_grad(): + for input_data, target_data in data_loader: + + inputs_batch = input_data # labels are ignored + sim_model(inputs_batch) + + current_batch_counter += 1 + if current_batch_counter == max_batch_counter: + break diff --git a/Docs/beta/snippets/torch/prepare_model.py b/Docs/beta/snippets/torch/prepare_model.py new file mode 100644 index 00000000000..6c0d6621622 --- /dev/null +++ b/Docs/beta/snippets/torch/prepare_model.py @@ -0,0 +1,5 @@ +# Prepare the model +from aimet_torch.model_preparer import prepare_model +prepared_model = prepare_model() + +# where is a torch.nn.Module \ No newline at end of file diff --git a/Docs/beta/ug/features/adaround.rst b/Docs/beta/ug/features/adaround.rst deleted file mode 100644 index 60e81534954..00000000000 --- a/Docs/beta/ug/features/adaround.rst +++ /dev/null @@ -1,102 +0,0 @@ -.. _feature-adaround: - -################# -Adaptive rounding -################# - -Context -======= - -.. include:: ../user_guide/adaround.rst - :start-after: adaround-context - :end-before: adaround-api - -Prerequisites -------------- - -Model, GPU, CUDA, dataloaders, dependencies. - -Workflow --------- - -Step 1 -~~~~~~ - -.. tabs:: - - .. tab:: PyTorch - - PyTorch code example. - - .. literalinclude:: ../torch_code_examples/adaround.py - :language: python - :pyobject: apply_adaround_example - - .. tab:: TensorFlow - - Keras code example. - - .. literalinclude:: ../keras_code_examples/adaround.py - :language: python - :pyobject: apply_adaround_example - - .. tab:: ONNX - - ONNX code example. - - .. literalinclude:: ../onnx_code_examples/adaround.py - :language: python - :pyobject: apply_adaround_example - -Step 2 -~~~~~~ - -... and so on. - - -Results -------- - -Optional. 
- -AdaRound should result in improved accuracy, but does not guaranteed sufficient improvement. - - -Next steps ----------- - -If AdaRound resulted in satisfactory accuracy, export the model. - -.. tabs:: - - .. tab:: PyTorch - - Link to PyTorch export procedure. - - .. tab:: TensorFlow - - Link to TensorFlow export procedure. - - .. tab:: ONNX - - Link to ONNX export procedure. - -If the model is still not accurate enough, the next step is typically to try :doc:`quantization-aware training `. - - -API -=== - -.. tabs:: - - .. tab:: PyTorch - - :ref:`PyTorch API ` - - .. tab:: TensorFlow - - :ref:`Keras API ` - - .. tab:: ONNX - - :ref:`ONNX API ` diff --git a/Docs/beta/ug/features/index.rst b/Docs/beta/ug/features/index.rst deleted file mode 100644 index 5f4cacb0080..00000000000 --- a/Docs/beta/ug/features/index.rst +++ /dev/null @@ -1,141 +0,0 @@ -.. _features-index: - -################ -Optimization Techniques -################ - -.. toctree:: - :hidden: - - Quantization aware training (QAT) - Automatic quantization (AutoQuant) - Adaptive rounding (Adaround) - Cross-layer equalization (CLE) - Batch norm re-estimation (BN) - Quantization analyzer (QuantAnalyzer) - Visualization - Weight singular value decomposition (Weight SVD) - Spatial singular value decomposition (Spatial SVD) - Channel pruning (CP) - - -Quantization aware training (QAT) -================== - -.. grid:: 1 - - .. grid-item-card:: Quantization aware training (QAT) - :link: feature-qat - :link-type: ref - - QAT fine-tunes the model parameters in the presence of quantization noise. - - -Automatic quantization (AutoQuant) -================== - -.. grid:: 1 - - .. grid-item-card:: Automatic quantization (AutoQuant) - :link: feature-autoquant - :link-type: ref - - AutoQuant analyzes the model, determines the best sequence of AIMET post-training quantization techniques, and applies these techniques. - - - - -Adaptive rounding (Adaround) -================== - -.. grid:: 1 - - .. 
grid-item-card:: Adaptive rounding (Adaround) - :link: feature-adaround - :link-type: ref - - AdaRound uses training data to improve accuracy over naïve rounding. - - -Cross-layer equalization (CLE) -================== - -.. grid:: 1 - - .. grid-item-card:: Cross-layer equalization (CLE) - :link: feature-cle - :link-type: ref - - CLE scales the parameter ranges across different channels to increase the range for layers with low range and reduce range for layers with high range, enabling the same quantizaion parameters to be used across all channels. - - -Batch norm re-estimation (BN) -================== - -.. grid:: 1 - - .. grid-item-card:: Batch norm re-estimation (BN) - :link: feature-bn - :link-type: ref - - BN re-estimated statistics are used to adjust the quantization scale parameters of preceeding Convolution or Linear layers, effectively folding the BN layers. - - -Quantization analyzer (QuantAnalyzer) -================== - -.. grid:: 1 - - .. grid-item-card:: Quantization analyzer (QuantAnalzer) - :link: feature-quant-analyzer - :link-type: ref - - QuantAnalyzer automatically identify sensitive areas and hotspots in the model. - - -Visualization -================== - -.. grid:: 1 - - .. grid-item-card:: Visualization - :link: feature-visualization - :link-type: ref - - QuantAnalyzer automatically identify sensitive areas and hotspots in the model. - - -Weight singular value decomposition (Weight SVD) -================== - -.. grid:: 1 - - .. grid-item-card:: Weight singular value decomposition (Weight SVD) - :link: feature-weight-svd - :link-type: ref - - Weight SVD decomposes one large MAC or memory layer into two smaller layers. - - -Spatial singular value decomposition (Spatial SVD) -================== - -.. grid:: 1 - - .. grid-item-card:: Spatial singular value decomposition (Spatial SVD) - :link: feature-spatial-svd - :link-type: ref - - Spatial SVD decomposes one large convolution (Conv) MAC or memory layer into two smaller layers. 
- - -Channel pruning (CP) -================== - -.. grid:: 1 - - .. grid-item-card:: Channel pruning (CP) - :link: feature-cp - :link-type: ref - - CP removes less-important input channels from 2D convolution layers. diff --git a/Docs/beta/ug/index.rst b/Docs/beta/ug/index.rst deleted file mode 100644 index 69c6c3190dc..00000000000 --- a/Docs/beta/ug/index.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. _ug-index: - -######### -AIMET User Guide -######### - -.. toctree:: - :hidden: - - Model Optimization - Optimization Techniques - -This user guide is organized into the following sections: - -**Model optimization** describes the recommended workflow for quantizing and compressing models using AIMET. - -**Techniques** describes each quantization and compression technique in detail and gives procedures for using each one. \ No newline at end of file diff --git a/Docs/beta/ug/optimization/index.rst b/Docs/beta/ug/optimization/index.rst deleted file mode 100644 index 1f897969f7a..00000000000 --- a/Docs/beta/ug/optimization/index.rst +++ /dev/null @@ -1,3 +0,0 @@ -######### -Model Optimization -######### From 1526234efbe20375ff61cb704af1e6d902878ce0 Mon Sep 17 00:00:00 2001 From: Kyunggeun Lee Date: Thu, 7 Nov 2024 14:14:35 -0800 Subject: [PATCH 3/3] Make beta doc build optional for agile development Signed-off-by: Kyunggeun Lee --- Docs/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Docs/CMakeLists.txt b/Docs/CMakeLists.txt index 8b67abe23c1..e3051d78692 100644 --- a/Docs/CMakeLists.txt +++ b/Docs/CMakeLists.txt @@ -50,7 +50,7 @@ add_custom_target(doc sphinx-build -v -T -b html ${CMAKE_BINARY_DIR}/Docs_SOURCE ${CMAKE_CURRENT_BINARY_DIR}) add_dependencies(doc - copy_doc_source copy_examples doc_beta) + copy_doc_source copy_examples) add_custom_target(copy_doc_source COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_BINARY_DIR}/Docs_SOURCE)