diff --git a/Docs/CMakeLists.txt b/Docs/CMakeLists.txt index 8b67abe23c1..e3051d78692 100644 --- a/Docs/CMakeLists.txt +++ b/Docs/CMakeLists.txt @@ -50,7 +50,7 @@ add_custom_target(doc sphinx-build -v -T -b html ${CMAKE_BINARY_DIR}/Docs_SOURCE ${CMAKE_CURRENT_BINARY_DIR}) add_dependencies(doc - copy_doc_source copy_examples doc_beta) + copy_doc_source copy_examples) add_custom_target(copy_doc_source COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_BINARY_DIR}/Docs_SOURCE) diff --git a/Docs/beta/_static/style.css b/Docs/beta/_static/style.css index dd30c3fa814..76652d136b1 100644 --- a/Docs/beta/_static/style.css +++ b/Docs/beta/_static/style.css @@ -1,283 +1,1221 @@ -.rst-content .hideitem { - display: none; +@font-face { + font-family: Roboto; + font-style: normal; + font-weight: 400; + src: local("Roboto"), local("Roboto-Regular"), url(fonts/roboto/roboto.woff2) format("woff2"); } - -nav .hideitem { - display: unset; - font-size: 13px; +@font-face { + font-family: Roboto; + font-style: italic; + font-weight: 400; + src: local("Roboto Italic"), local("Roboto-Italic"), url(fonts/roboto/roboto-italic.woff2) format("woff2"); } - -.hideitem { - font-size: 14px; +@font-face { + font-family: Roboto; + font-style: normal; + font-weight: 700; + src: local("Roboto Bold"), local("Roboto-Bold"), url(fonts/roboto/roboto-bold.woff2) format("woff2"); } - -.rst-content .code-block-caption .headerlink, .rst-content .eqno .headerlink, .rst-content .toctree-wrapper > p.caption .headerlink, .rst-content dl dt .headerlink, .rst-content h1 .headerlink, .rst-content h2 .headerlink, .rst-content h3 .headerlink, .rst-content h4 .headerlink, .rst-content h5 .headerlink, .rst-content h6 .headerlink, .rst-content p.caption .headerlink, .rst-content p .headerlink, .rst-content table > caption .headerlink { - opacity: 0; - font-size: 14px; - font-family: FontAwesome; - margin-left: -8.5em; - position: absolute; - margin-top: -55px; +@font-face { + font-family: 
Roboto Mono; + font-style: normal; + font-weight: 400; + src: local("Roboto Mono Regular"), local("RobotoMono-Regular"), url(fonts/roboto-mono/roboto-mono.woff2) format("woff2"); } - - +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 400; + src: local("Roboto Mono Italic"), local("RobotoMono-Italic"), url(fonts/roboto-mono/roboto-mono-italic.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: normal; + font-weight: 700; + src: local("Roboto Mono Bold"), local("RobotoMono-Bold"), url(fonts/roboto-mono/roboto-mono-bold.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 700; + src: local("Roboto Mono Bold Italic"), local("RobotoMono-BoldItalic"), url(fonts/roboto-mono/roboto-mono-bold-italic.woff2) format("woff2"); +} +/*****************************************************************************/ +/* Typography */ :root { - --aimet-blue: #3253dc; - --aimet-dark-blue: #0000ff; - --aimet-white: #ffffff; - --aimet-border-grey: #e0e0e0; - --aimet-menu-hover: #e3efff; - --aimet-menu-font-active: #0058ff; - --aimet-code-grey: #fafafa; - --aimet-light-blue: #e7f2fa; + --codeBackgroundColor: #f8f8f8; + --inlineCodeBackgroundColor: #f8f8f8; + --codeBlue: #0000ff; + --codeGreen: #008000; + --dividerColor: rgba(0, 0, 0, 0.08); + --faintFontColor: rgba(0, 0, 0, 0.6); + --fontColor: #252630; + --linkColor: #2980b9; + --mainBackgroundColor: white; + --mainNavColor: #3889ce; + --notificationBannerColor: #176bb0; + --searchHighlightColor: #fff150; + --sidebarColor: white; + --navbarHeight: 4rem; } - -/* In main body this sets the background used in the text boxes*/ -div.document { - background-color: var(--aimet-white); +:root[data-mode=darkest] { + --mainBackgroundColor: black; + --sidebarColor: black; + --codeBackgroundColor: rgba(255, 255, 255, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.1); +} +:root[data-mode=dark] { + --mainBackgroundColor: 
#242429; + --sidebarColor: #242429; + --codeBackgroundColor: rgba(0, 0, 0, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.06); +} +:root[data-mode=dark], :root[data-mode=darkest] { + --codeBlue: #77baff; + --codeGreen: #38c038; + --dividerColor: rgba(255, 255, 255, 0.1); + --faintFontColor: rgba(255, 255, 255, 0.6); + --fontColor: white; + --linkColor: #319be0; + --searchHighlightColor: #fe8e04; } -.wy-side-nav-search > div.version { -margin-top: -.4045em; -margin-bottom: .809em; -font-weight: 400; -color: var(--aimet-dark-blue); +body { + font-family: Roboto, "OpenSans", sans-serif; + background-color: var(--mainBackgroundColor); + color: var(--fontColor); } -/* In body this sets the background used on the left and right side of the main content box*/ -.wy-body-for-nav { - background: var(--aimet-white); - background-color: var(--aimet-white); - background-position-x: 0%; - background-position-y: 0%; - background-repeat: repeat; - background-attachment: scroll; - background-image: none; - background-size: auto; - background-origin: padding-box; - background-clip: border-box; +h1 { + font-size: 2rem; } -/* position of main body */ -.wy-grid-for-nav { - position: absolute; - width: 95%; - height: 100%; - display: grid; - justify-content: left; +h2 { + font-size: 1.5rem; } -/* boarder around main content */ -.wy-nav-content-wrap { - margin-left: 300px; - background: var(--aimet-white); - min-height: auto; - border-left-width: 2px; - border-left-style: solid; - border-left-color: var(--aimet-border-grey); - border-right-width: 2px; - border-right-style: solid; - border-right-color: var(--aimet-border-grey); - /* position: fixed; */ +h3 { + font-size: 1.17rem; } -/* Left Menu */ -.wy-nav-side { - position: fixed; - top: 0; - bottom: 0; - left: 0; - padding-bottom: 2em; - width: 300px; - overflow-x: hidden; - overflow-y: hidden; - min-height: 100%; - color: var(--aimet-white); - background: var(--aimet-white); - z-index: 200; -} -.wy-side-nav-search { - 
display: block; - width: 300px; - padding: .809em; - margin-bottom: .809em; - z-index: 200; - background-color: var(--aimet-white); - text-align: center; - color: var(--aimet-blue); +a { + color: var(--linkColor); + text-decoration: none; } -.wy-nav-content { - padding: 1.618em 3.236em; - height: 100%; - max-width: 1400px; - margin: auto; - background: #ffffff; +/*****************************************************************************/ +html { + height: 100%; + scroll-padding-top: var(--navbarHeight); } -.wy-side-nav-search .wy-dropdown > a, .wy-side-nav-search > a { - color: blue; - font-size: 100%; - font-weight: 700; - display: inline-block; - padding: 4px 6px; - margin-bottom: .809em; - max-width: 100%; +html, +body { + padding: 0; + margin: 0; + min-height: 100%; } -.wy-menu-vertical a:hover button.toctree-expand { - color: black; /* the button color when hover over */ +body { + display: flex; + flex-direction: column; } -.wy-menu-vertical a:hover { +/*****************************************************************************/ +/* Top nav */ +#searchbox h3#searchlabel { + display: none; +} +#searchbox form.search { + display: flex; + flex-direction: row; +} +#searchbox form.search input { + display: block; + box-sizing: border-box; + padding: 0.3rem; + color: rgba(0, 0, 0, 0.7); + border-radius: 0.2rem; +} +#searchbox form.search input[type=text] { + border: none; + background-color: rgba(255, 255, 255, 0.6); + flex-grow: 1; + margin-right: 0.2rem; +} +#searchbox form.search input[type=text]::placeholder { + color: rgba(0, 0, 0, 0.6); +} +#searchbox form.search input[type=submit] { cursor: pointer; - background: var(--aimet-menu-hover); /* color of toctree menu when hovered over */ + color: var(--mainNavColor); + flex-grow: 0; + border: none; + background-color: white; } -/*.wy-menu-vertical ul:hover { - color: #3253dc; -}*/ - -/* set the color behind all toc tree menus */ -.wy-menu-vertical li { - background: var(--aimet-white); - background-color: 
rgb(255, 255, 255); - background-position-x: 0%; - background-position-y: 0%; - background-repeat: repeat; - background-attachment: scroll; - background-image: none; - background-size: auto; - background-origin: padding-box; - background-clip: border-box; +div#top_nav { + position: fixed; + top: 0; + left: 0; + right: 0; + color: white; + z-index: 100; } - -/* -.wy-menu-vertical li.current { - background: var(--aimet-menu-hover); -} -*/ -.wy-menu-vertical a { - line-height: 18px; - padding: .4045em 1.618em; +div#top_nav div#notification_banner { + background-color: var(--notificationBannerColor); + box-sizing: border-box; + padding: 0.1rem 1rem; + display: flex; + flex-direction: row; + align-items: center; + justify-content: right; +} +div#top_nav div#notification_banner a.close { + flex-grow: 0; + flex-shrink: 0; + color: rgba(255, 255, 255, 0.85); + text-align: right; + font-size: 0.6rem; + text-transform: uppercase; display: block; - position: relative; - font-size: 90%; - /*color: #e8f3f7; /* toctree color - color: #3253dc */ - color: var(--aimet-blue); - + text-decoration: none; + margin-left: 0.5rem; } -.wy-menu-vertical li.current { - background: var(--aimet-white); /* highlist color behind toctree header when current */ +div#top_nav div#notification_banner a.close:hover { + color: white; } - -.wy-menu-vertical li.current > a { - color: var(--aimet-menu-font-active); /* font color when current*/ - font-weight: 700; - position: relative; - background: var(--aimet-menu-hover); /* back ground of the toctree header when current*/ - border: none; - border-top-color: currentcolor; - border-top-style: none; - border-top-width: medium; - border-bottom-color: currentcolor; - border-bottom-style: none; - border-bottom-width: medium; -padding: .4045em 1.618em; -} - -/* menu item color -.wy-menu-vertical li.current a:hover { - background: var(--aimet-menu-hover); +div#top_nav div#notification_banner p { + flex-grow: 1; + margin: 0; + text-align: center; + font-size: 
0.9rem; + line-height: 1.2; + padding: 0.4rem 0; } -*/ - -.wy-menu-vertical li.toctree-l1.current > a, .wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a { - background: var(--aimet-white); +div#top_nav div#notification_banner p a { + color: white; + text-decoration: underline; } - -.wy-menu-vertical li.toctree-l1.current > a, .wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav { + background-color: var(--mainNavColor); + box-sizing: border-box; + padding: 1rem; + display: flex; + flex-direction: row; + align-items: center; } - -.wy-menu-vertical li.toctree-l2.current > a, .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a { - background: var(--aimet-white); +div#top_nav nav h1 { + flex-grow: 1; + font-size: 1.2rem; + margin: 0; + padding: 0 0 0 0.8rem; + line-height: 1; } - -.wy-menu-vertical li.toctree-l2.current > a, .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav h1 a { + color: white; } - -.wy-menu-vertical li.toctree-l3.current > a, .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a { - background: var(--aimet-white); +div#top_nav nav h1 img { + height: 1.3rem; + width: auto; } - -.wy-menu-vertical li.toctree-l3.current > a, .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a:hover { - background: var(--aimet-menu-hover); +div#top_nav nav p#toggle_sidebar { + transform: rotate(90deg); + letter-spacing: 0.1rem; + flex-grow: 0; + margin: 0; + padding: 0; +} +div#top_nav nav p#toggle_sidebar a { + color: white; + font-weight: bold; +} +div#top_nav nav a#mode_toggle, div#top_nav nav a#source_link { + margin-right: 1rem; + display: block; + flex-grow: 0; +} +div#top_nav nav a#mode_toggle svg, div#top_nav nav a#source_link svg { + height: 1.3rem; + width: 1.3rem; + vertical-align: middle; +} +div#top_nav nav p.mobile_search_link { + margin: 0; +} +@media (min-width: 50rem) { + div#top_nav nav 
p.mobile_search_link { + display: none; + } +} +div#top_nav nav p.mobile_search_link a { + color: white; +} +div#top_nav nav p.mobile_search_link a svg { + height: 1rem; + vertical-align: middle; +} +@media (max-width: 50rem) { + div#top_nav nav div.searchbox_wrapper { + display: none; + } +} +div#top_nav nav div.searchbox_wrapper #searchbox { + align-items: center; + display: flex !important; + flex-direction: row-reverse; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link { + margin: 0 0.5rem 0 0; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a { + color: rgba(255, 255, 255, 0.8); + font-size: 0.8em; + padding-right: 0.5rem; + text-decoration: underline; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a:hover { + color: white; } -.wy-menu-vertical li.toctree-l3, .wy-menu-vertical li.toctree-l4 { - font-size: 14px; +/*****************************************************************************/ +/* Main content */ +div.document { + flex-grow: 1; + margin-top: 2rem; + margin-bottom: 5rem; + margin-left: 15rem; + margin-right: 15rem; + padding-top: var(--navbarHeight); + /***************************************************************************/ + /***************************************************************************/ +} +@media (max-width: 50rem) { + div.document { + margin-left: 0px; + margin-right: 0px; + } +} +div.document section, +div.document div.section { + margin: 2rem 0; +} +div.document section:first-child, +div.document div.section:first-child { + margin-top: 0; +} +div.document section > section, +div.document div.section > div.section { + margin: 2rem 0; +} +div.document section > section > section, +div.document div.section > div.section > div.section { + margin: 2rem 0 0 0; +} +div.document section > section > section > section, +div.document div.section > div.section > div.section > div.section { + margin: 1.5rem 0 0 0; +} +div.document h1 + section, +div.document h1 + 
div.section { + margin-top: 2.5rem !important; +} +div.document h2 + section, +div.document h2 + div.section { + margin-top: 1.5rem !important; +} +div.document img { + max-width: 100%; +} +div.document code { + padding: 2px 4px; + background-color: var(--inlineCodeBackgroundColor); + border-radius: 0.2rem; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9em; +} +div.document div.documentwrapper { + max-width: 45rem; + margin: 0 auto; + flex-grow: 1; + box-sizing: border-box; + padding: 1rem; +} +div.document div.highlight { + color: #252630; + box-sizing: border-box; + padding: 0.2rem 1rem; + margin: 0.5rem 0; + border-radius: 0.2rem; + font-size: 0.9rem; +} +div.document div.highlight pre { + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; +} +div.document div[class*=highlight] { + overflow-x: auto; +} +div.document a.headerlink { + font-size: 0.6em; + display: none; + padding-left: 0.5rem; + vertical-align: middle; +} +div.document h1, +div.document h2, +div.document h3, +div.document h4, +div.document h5, +div.document h6, +div.document str, +div.document b { + font-weight: 700; +} +div.document h1 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h2 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h3, div.document h4 { + margin: 1rem 0 0.5rem 0; +} +div.document h1:hover a.headerlink, +div.document h2:hover a.headerlink, +div.document h3:hover a.headerlink, +div.document h4:hover a.headerlink { + display: inline-block; +} +div.document p, +div.document li { + font-size: 1rem; + line-height: 1.5; +} +div.document li p { + margin: 0 0 0.5rem 0; +} +div.document ul, div.document ol { + padding-left: 2rem; +} +div.document ol.loweralpha { + list-style: lower-alpha; +} +div.document ol.arabic { + list-style: decimal; +} +div.document ol.lowerroman { + list-style: lower-roman; +} +div.document ol.upperalpha { + list-style: upper-alpha; +} +div.document ol.upperroman { + list-style: upper-roman; +} 
+div.document dd { + margin-left: 1.5rem; +} +div.document hr { + border: none; + height: 1px; + background-color: var(--dividerColor); + margin: 2rem 0; +} +div.document table.docutils { + border-collapse: collapse; +} +div.document table.docutils th, div.document table.docutils td { + border: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: 0.5rem 1rem; +} +div.document table.docutils th p, div.document table.docutils th ul, div.document table.docutils td p, div.document table.docutils td ul { + margin: 0.3rem 0; +} +div.document table.docutils th ul, div.document table.docutils td ul { + padding-left: 1rem; +} +div.document form input { + padding: 0.5rem; +} +div.document form input[type=submit], div.document form button { + border: none; + background-color: var(--mainNavColor); + color: white; + padding: 0.5rem 1rem; + border-radius: 0.2rem; +} +div.document span.highlighted { + background-color: var(--searchHighlightColor); + padding: 0 0.1em; +} +div.document div#search-results { + padding-top: 2rem; +} +div.document div#search-results p.search-summary { + font-size: 0.8em; +} +div.document div#search-results ul.search { + list-style: none; + padding-left: 0; +} +div.document div#search-results ul.search li { + border-bottom: 1px solid var(--dividerColor); + margin: 0; + padding: 2rem 0; +} +div.document div#search-results ul.search li > a:first-child { + font-size: 1.2rem; +} +div.document dd ul, div.document dd ol { + padding-left: 1rem; +} +div.document dl.py { + margin-bottom: 2rem; +} +div.document dl.py dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; +} +div.document dl.py em.property { + color: var(--sidebarColor); + font-weight: bold; +} +div.document dl.py span.sig-name { + color: 
var(--codeBlue); + font-weight: bold; +} +div.document dl.py em.property { + color: var(--codeGreen); +} +div.document dl.py em.sig-param { + margin-left: 2rem; +} +div.document dl.py em.sig-param span.default_value { + color: var(--codeGreen); +} +div.document dl.py span.sig-return span.sig-return-typehint { + color: var(--fontColor); +} +div.document dl.py span.sig-return span.sig-return-typehint pre { + color: var(--fontColor); +} +div.document dl.py em.sig-param > span:first-child { + font-weight: bold; +} +div.document dl.cpp, div.document dl.c { + margin-bottom: 1rem; +} +div.document dl.cpp dt.sig, div.document dl.c dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; + line-height: 1.4; +} +div.document dl.cpp span.sig-name, div.document dl.c span.sig-name { + color: var(--codeBlue); + font-weight: bold; +} +div.document dl.cpp span.sig-indent, div.document dl.c span.sig-indent { + margin-left: 2rem; +} +div.document dl.cpp span.target + span, div.document dl.c span.target + span { + color: var(--codeGreen); +} +div.document dl.cpp span.sig-param > span:first-child, div.document dl.c span.sig-param > span:first-child { + font-weight: bold; +} +div.document div.admonition { + box-shadow: 0px 0px 0px 1px var(--dividerColor); + border-radius: 0.2rem; + margin: 1rem 0; + overflow: hidden; +} +div.document div.admonition p { + box-sizing: border-box; + font-size: 0.9rem; + padding: 0.5rem; + margin: 0; +} +div.document div.admonition p:first-child { + padding-bottom: 0; + margin-bottom: 0; +} +div.document div.admonition p + p { + padding-top: 0.2rem; +} +div.document div.admonition p.admonition-title { + font-weight: bolder; + letter-spacing: 0.01rem; +} +div.document div.admonition.hint, div.document div.admonition.important, 
div.document div.admonition.tip { + border-left: 5px solid #56b79c; +} +div.document div.admonition.hint p.admonition-title, div.document div.admonition.important p.admonition-title, div.document div.admonition.tip p.admonition-title { + color: #56b79c; +} +div.document div.admonition.note { + border-left: 5px solid #587f9f; +} +div.document div.admonition.note p.admonition-title { + color: #587f9f; +} +div.document div.admonition.danger, div.document div.admonition.error { + border-left: 5px solid #e6a39a; +} +div.document div.admonition.danger p.admonition-title, div.document div.admonition.error p.admonition-title { + color: #e6a39a; +} +div.document div.admonition.attention, div.document div.admonition.caution, div.document div.admonition.warning { + border-left: 5px solid #e7b486; +} +div.document div.admonition.attention p.admonition-title, div.document div.admonition.caution p.admonition-title, div.document div.admonition.warning p.admonition-title { + color: #e7b486; } -/* - * Highlight color for code segments - */ -.highlight, .literal-block -{ - background: var(--aimet-code-grey); +/*****************************************************************************/ +/* Custom classes for UI control extensions: tabs, grids, cards. */ + +/* tab-heading: Heading for use inside tabs, since underline */ +/* notation doesn't work there. Invoke with: */ +/* .. 
container:: .tab-heading */ +/* Heading text */ + +.tab-heading p { + font-size: 1.2em !important; + font-weight: bold; } -/* - * Table options -*/ -/* Use root font size for generated table */ -html.writer-html5 .rst-content table.docutils td>p, html.writer-html5 .rst-content table.docutils th>p { - border: 0 solid var(--aimet-border-grey); - line-height: 1.5rem; - font-size: 1rem; +/*****************************************************************************/ +/* Sidebar */ +div.sphinxsidebar { + background-color: var(--sidebarColor); + border-right: 1px solid var(--dividerColor); + position: fixed; + left: 0; + top: 0; + bottom: 0; + width: 15rem; + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + z-index: 50; +} +@media (max-width: 50rem) { + div.sphinxsidebar { + display: none; + } +} +div.sphinxsidebar div.sphinxsidebarwrapper { + height: 100%; + overflow-y: auto; +} +/* This is a total hack to remove the inexplicable ToC from the left sidebar */ +div.sphinxsidebar div.sphinxsidebarwrapper > div { + display: none; +} +/* Format the TOC caption (title) */ +div.sphinxsidebar span.caption-text { + font-weight: bold; +} +div.sphinxsidebar ul { + padding-left: 0rem; + list-style: none; +} +div.sphinxsidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div.sphinxsidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + text-decoration: none; +} +div.sphinxsidebar ul li a.current { + color: var(--linkColor); +} +div.sphinxsidebar ul li a:hover { + color: var(--linkColor); +} +div.sphinxsidebar ul li > ul { + padding-left: 1rem; +} +div.sphinxsidebar p { + color: var(--faintFontColor); } -/* Remove internal bottom borders */ -.rst-content table.docutils td, .wy-table-bordered-all td { - border-left-width: 1px; - border-bottom-width: 0; +/*****************************************************************************/ +/* The right sidebar, showing the table of contents for the 
current page. */ +div#show_right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 20; + background-color: var(--sidebarColor); + border-left: 1px solid var(--dividerColor); + border-bottom: 1px solid var(--dividerColor); + padding: var(--navbarHeight) 1rem 0rem; +} +div#show_right_sidebar p { + font-size: 0.9em; +} +div#show_right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#show_right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; } -/* Keep outside borders */ -.rst-content table.docutils, .wy-table-bordered-all { - border: 1px solid var(--aimet-border-grey); +div#right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 50; + background-color: var(--sidebarColor); + width: 15rem; + border-left: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + height: 100%; + overflow-y: auto; +} +div#right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; +} +div#right_sidebar ul { + padding-left: 0rem; + list-style: none; +} +div#right_sidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div#right_sidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + text-decoration: none; +} +div#right_sidebar ul li a.current { + color: var(--linkColor); +} +div#right_sidebar ul li a:hover { + color: var(--linkColor); +} +div#right_sidebar ul li > ul { + padding-left: 1rem; +} +div#right_sidebar p { + color: var(--faintFontColor); +} +@media (max-width: 50rem) { + div#right_sidebar { + display: none; + } } -/* Set text color of side navigation TOC section headers */ -.wy-menu-vertical header, .wy-menu-vertical p.caption { - color: var(--aimet-dark-blue); +/* Limit the TOC depth in the right sidebar */ +div.page_toc ul ul ul { + 
display: none; } +/*****************************************************************************/ +/* Footer */ +div.footer { + box-sizing: border-box; + padding-top: 2rem; + font-size: 0.7rem; + text-align: center; + text-transform: uppercase; + color: var(--faintFontColor); +} -/* - * Configure the appearance of code output box. - * Values are based on sphinx-gallery configuration -*/ -.script-output { - color: black; - display: flex; - gap: 0.5em; +p#theme_credit { + font-size: 0.6rem; + text-transform: uppercase; + text-align: center; + color: var(--faintFontColor); } -.script-output::before { - content: "Out:"; - line-height: 1.4; - padding-top: 10px; +/*****************************************************************************/ +/* Buttons */ +div.button_nav_wrapper { + margin-left: 15rem; + margin-right: 15rem; +} +@media (max-width: 50rem) { + div.button_nav_wrapper { + margin-left: 0px; + margin-right: 0px; + } +} +div.button_nav_wrapper div.button_nav { + max-width: 45rem; + margin: 0 auto; + display: flex; + flex-direction: row; + width: 100%; +} +div.button_nav_wrapper div.button_nav div { + box-sizing: border-box; + padding: 1rem; + flex: 50%; +} +div.button_nav_wrapper div.button_nav div a { + display: block; +} +div.button_nav_wrapper div.button_nav div a span { + vertical-align: middle; +} +div.button_nav_wrapper div.button_nav div a span.icon { + font-weight: bold; + font-size: 0.8em; +} +div.button_nav_wrapper div.button_nav div.left a { + text-align: left; +} +div.button_nav_wrapper div.button_nav div.left a span.icon { + padding-right: 0.4rem; +} +div.button_nav_wrapper div.button_nav div.right a { + text-align: right; +} +div.button_nav_wrapper div.button_nav div.right a span.icon { + padding-left: 0.4rem; } -.script-output .highlight { - background: transparent; - flex-grow: 1; - overflow: auto; - /* Allow output block to take up maximum 25 lines */ - max-height: 25em; 
+/*****************************************************************************/ +/* Pygments overrides in dark mode */ +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight { + --black: #000000; + --red: #ff9393; + --darkBlue: #6b83fe; + --grey: #a8a8a8; + --pink: #ff99d8; + --torquoise: #68e9e9; + --brown: #d48a00; + --purple: #ce04e9; + --paleYellow: #454534; + background: var(--codeBackgroundColor); + color: var(--fontColor); + /* Comment */ + /* Error */ + /* Keyword */ + /* Operator */ + /* Comment.Hashbang */ + /* Comment.Multiline */ + /* Comment.Preproc */ + /* Comment.PreprocFile */ + /* Comment.Single */ + /* Comment.Special */ + /* Generic.Deleted */ + /* Generic.Emph */ + /* Generic.Error */ + /* Generic.Heading */ + /* Generic.Inserted */ + /* Generic.Output */ + /* Generic.Prompt */ + /* Generic.Strong */ + /* Generic.Subheading */ + /* Generic.Traceback */ + /* Keyword.Constant */ + /* Keyword.Declaration */ + /* Keyword.Namespace */ + /* Keyword.Pseudo */ + /* Keyword.Reserved */ + /* Keyword.Type */ + /* Literal.Number */ + /* Literal.String */ + /* Name.Attribute */ + /* Name.Builtin */ + /* Name.Class */ + /* Name.Constant */ + /* Name.Decorator */ + /* Name.Entity */ + /* Name.Exception */ + /* Name.Function */ + /* Name.Label */ + /* Name.Namespace */ + /* Name.Tag */ + /* Name.Variable */ + /* Operator.Word */ + /* Text.Whitespace */ + /* Literal.Number.Bin */ + /* Literal.Number.Float */ + /* Literal.Number.Hex */ + /* Literal.Number.Integer */ + /* Literal.Number.Oct */ + /* Literal.String.Affix */ + /* Literal.String.Backtick */ + /* Literal.String.Char */ + /* Literal.String.Delimiter */ + /* Literal.String.Doc */ + /* Literal.String.Double */ + /* Literal.String.Escape */ + /* Literal.String.Heredoc */ + /* Literal.String.Interpol */ + /* Literal.String.Other */ + /* Literal.String.Regex */ + /* Literal.String.Single */ + /* 
Literal.String.Symbol */ + /* Name.Builtin.Pseudo */ + /* Name.Function.Magic */ + /* Name.Variable.Class */ + /* Name.Variable.Global */ + /* Name.Variable.Instance */ + /* Name.Variable.Magic */ + /* Literal.Number.Integer.Long */ +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight pre, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight pre { + line-height: 125%; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal { + color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos { + color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .hll, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .hll { + background-color: var(--paleYellow); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c { + color: var(--torquoise); + font-style: italic; +} 
+:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .err, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .err { + border: 1px solid var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .k, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .k { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .o, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .o { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ch, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ch { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cm { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cp { + color: var(--brown); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cpf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cpf { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c1 { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cs { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gd { + color: var(--red); 
+} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ge, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ge { + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gr { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gh { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gi { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .go, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .go { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gp { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gs { + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gu, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gu { + color: var(--purple); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gt { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kc { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] 
.highlight .kd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kd { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kn { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kr { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kt { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .m, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .m { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .na, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .na { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nb { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nc { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .no, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .no { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nd { + color: var(--purple); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ni, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ni { + color: var(--grey); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ne, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ne { + color: var(--red); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nf { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nl { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nn { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nt { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nv, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nv { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ow, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ow { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .w, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] 
.highlight .w { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mb { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mf { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mh { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mi { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mo, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mo { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sa, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sa { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sb { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sc { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .dl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .dl { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sd { + color: var(--red); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s2, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .s2 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .se, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .se { + color: var(--brown); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sh { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .si, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .si { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sx, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sx { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sr { + color: var(--pink); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s1 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ss, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ss { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .bp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .bp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .fm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .fm { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vc { + color: var(--darkBlue); +} +:root[data-mode=dark] 
body[data-dark_mode_code_blocks=true] .highlight .vg, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vg { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vi { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vm { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .il, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .il { + color: var(--grey); } +/*# sourceMappingURL=basic_mod.css.map */ diff --git a/Docs/beta/apiref/index.rst b/Docs/beta/apiref/index.rst new file mode 100644 index 00000000000..3d98f4071dc --- /dev/null +++ b/Docs/beta/apiref/index.rst @@ -0,0 +1,5 @@ +.. _apiref-index: + +######### +AIMET API +######### \ No newline at end of file diff --git a/Docs/beta/apiref/onnx/adaround.rst b/Docs/beta/apiref/onnx/adaround.rst new file mode 100644 index 00000000000..bdc8487ac32 --- /dev/null +++ b/Docs/beta/apiref/onnx/adaround.rst @@ -0,0 +1,21 @@ +.. _apiref-onnx-adaround: + :orphan: + +####################### +AIMET ONNX AdaRound API +####################### + +**Top-level API** + +.. autofunction:: aimet_onnx.adaround.adaround_weight.Adaround.apply_adaround + + +**Adaround Parameters** + + +.. autoclass:: aimet_onnx.adaround.adaround_weight.AdaroundParameters + :members: + +.. note:: + + We recommended using onnx-simplifier before adarounding the model. diff --git a/Docs/beta/apiref/tensorflow/adaround.rst b/Docs/beta/apiref/tensorflow/adaround.rst new file mode 100644 index 00000000000..28860723e75 --- /dev/null +++ b/Docs/beta/apiref/tensorflow/adaround.rst @@ -0,0 +1,22 @@ +.. 
_apiref-keras-adaround: + :orphan: + +############################# +AIMET TensorFlow AdaRound API +############################# + +**Top-level API** + +.. autofunction:: aimet_tensorflow.keras.adaround_weight.Adaround.apply_adaround + +**Adaround Parameters** + +.. autoclass:: aimet_tensorflow.keras.adaround_weight.AdaroundParameters + :members: + +**Enum Definition** + +**Quant Scheme Enum** + +.. autoclass:: aimet_common.defs.QuantScheme + :members: \ No newline at end of file diff --git a/Docs/beta/apiref/torch/adaround.rst b/Docs/beta/apiref/torch/adaround.rst new file mode 100644 index 00000000000..005f56c28df --- /dev/null +++ b/Docs/beta/apiref/torch/adaround.rst @@ -0,0 +1,37 @@ +.. _apiref-torch-adaround: + :orphan: + +########################## +AIMET PyTorch AdaRound API +########################## + + +.. container:: tab-heading + + Top-level API + +.. note:: + + This module is also available in the experimental :mod:`aimet_torch.v2` namespace with the same top-level API. + To learn more about the differences between :mod:`aimet_torch` and :mod:`aimet_torch.v2`, see the + QuantSim v2 Overview. + +.. autofunction:: aimet_torch.v1.adaround.adaround_weight.Adaround.apply_adaround + + +.. container:: tab-heading + + Adaround Parameters + +.. autoclass:: aimet_torch.v1.adaround.adaround_weight.AdaroundParameters + :members: + + +.. container:: tab-heading + + Enum Definition + +**Quant Scheme Enum** + +.. autoclass:: aimet_common.defs.QuantScheme + :members: \ No newline at end of file diff --git a/Docs/beta/apiref/torch/model_preparer.rst b/Docs/beta/apiref/torch/model_preparer.rst new file mode 100644 index 00000000000..8412242ef1e --- /dev/null +++ b/Docs/beta/apiref/torch/model_preparer.rst @@ -0,0 +1,153 @@ +.. 
_apiref-torch-model-preparer: +:orphan: + +################## +Model Preparer API +################## + +AIMET PyTorch ModelPreparer API uses new graph transformation feature available in PyTorch 1.9+ version and automates +model definition changes required by user. For example, it changes functionals defined in forward pass to +torch.nn.Module type modules for activation and elementwise functions. Also, when torch.nn.Module type modules are reused, +it unrolls into independent modules. + +Users are strongly encouraged to use AIMET PyTorch ModelPreparer API first and then use the returned model as input +to all the AIMET Quantization features. + +AIMET PyTorch ModelPreparer API requires minimum PyTorch 1.9 version. + + +.. container:: tab-heading + + Top-level API + +.. autofunction:: aimet_torch.model_preparer.prepare_model + + +.. container:: tab-heading + +Limitations of torch.fx symbolic trace API + +Limitations of torch.fx symbolic trace: https://pytorch.org/docs/stable/fx.html#limitations-of-symbolic-tracing + +**1. Dynamic control flow is not supported by torch.fx** +Loops or if-else statement where condition may depend on some of the input values. It can only trace one execution +path and all the other branches that weren't traced will be ignored. For example, following simple function when traced, +will fail with TraceError saying that 'symbolically traced variables cannot be used as inputs to control flow':: + + def f(x, flag): + if flag: + return x + else: + return x*2 + + torch.fx.symbolic_trace(f) # Fails! + fx.symbolic_trace(f, concrete_args={'flag': True}) + +Workarounds for this problem: + +- Many cases of dynamic control flow can be simply made to static control flow which is supported by torch.fx + symbolic tracing. Static control flow is where loops or if-else statements whose value can't change + across different model forward passes. 
Such cases can be traced by removing data dependencies on input values by + passing concrete values to 'concrete_args' to specialize your forward functions. + +- In truly dynamic control flow, user should wrap such piece of code at model-level scope using torch.fx.wrap API + which will preserve it as a node instead of being traced through:: + + @torch.fx.wrap + def custom_function_not_to_be_traced(x, y): + """ Function which we do not want to be traced, when traced using torch FX API, call to this function will + be inserted as call_function, and won't be traced through """ + for i in range(2): + x += x + y += y + return x * x + y * y + +**2. Non-torch functions which does not use __torch_function__ mechanism is not supported by default in symbolic +tracing.** + +Workaround for this problem: + +- If we do not want to capture them in symbolic tracing then user should use torch.fx.wrap() API at module-level scope:: + + import torch + import torch.fx + torch.fx.wrap('len') # call the API at module-level scope. + torch.fx.wrap('sqrt') # call the API at module-level scope. + + class ModelWithNonTorchFunction(torch.nn.Module): + def __init__(self): + super(ModelWithNonTorchFunction, self).__init__() + self.conv = torch.nn.Conv2d(3, 4, kernel_size=2, stride=2, padding=2, bias=False) + + def forward(self, *inputs): + x = self.conv(inputs[0]) + return x / sqrt(len(x)) + + model = ModelWithNonTorchFunction().eval() + model_transformed = prepare_model(model) + + +**3. Customizing the behavior of tracing by overriding the Tracer.is_leaf_module() API** + +In symbolic tracing, leaf modules appears as node rather than being traced through and all the standard torch.nn modules +are default set of leaf modules. But this behavior can be changed by overriding the Tracer.is_leaf_module() API. + +AIMET model preparer API exposes 'module_to_exclude' argument which can be used to prevent certain module(s) being +traced through. 
For example, let's examine following code snippet where we don't want to trace CustomModule further:: + + class CustomModule(torch.nn.Module): + @staticmethod + def forward(x): + return x * torch.nn.functional.softplus(x).sigmoid() + + class CustomModel(torch.nn.Module): + def __init__(self): + super(CustomModel, self).__init__() + self.conv1 = torch.nn.Conv2d(3, 8, kernel_size=2) + self.custom = CustomModule() + + def forward(self, inputs): + x = self.conv1(inputs) + x = self.custom(x) + return x + + model = CustomModel().eval() + prepared_model = prepare_model(model, modules_to_exclude=[model.custom]) + print(prepared_model) + +In this example, 'self.custom' is preserved as node and not being traced through. + +**4. Tensor constructors are not traceable** + +For example, let's examine following code snippet:: + + def f(x): + return torch.arange(x.shape[0], device=x.device) + + torch.fx.symbolic_trace(f) + + Error traceback: + return torch.arange(x.shape[0], device=x.device) + TypeError: arange() received an invalid combination of arguments - got (Proxy, device=Attribute), but expected one of: + * (Number end, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad) + * (Number start, Number end, Number step, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad) + +The above snippet is problematic because arguments to torch.arange() are input dependent. 
+Workaround for this problem: + +- use deterministic constructors (hard-coding) so that the value they produce will be embedded as constant in + the graph:: + + def f(x): + return torch.arange(10, device=torch.device('cpu')) + +- Or use torch.fx.wrap API to wrap torch.arange() and call that instead:: + + @torch.fx.wrap + def do_not_trace_me(x): + return torch.arange(x.shape[0], device=x.device) + + def f(x): + return do_not_trace_me(x) + + torch.fx.symbolic_trace(f) diff --git a/Docs/beta/conf.py b/Docs/beta/conf.py index f2eba1f9acd..df85b122624 100644 --- a/Docs/beta/conf.py +++ b/Docs/beta/conf.py @@ -59,9 +59,9 @@ author = 'Qualcomm Innovation Center, Inc.' # The short X.Y version -version = '' +version = '2.0' # The full version, including alpha/beta/rc tags -release = '' +release = '2.0' if "SW_VERSION" in os.environ: version = os.environ['SW_VERSION'] else: @@ -77,7 +77,7 @@ def setup(app): # If your documentation needs a minimal Sphinx version, state it here. # -# needs_sphinx = '1.0' +# needs_sphinx = '5.3.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -92,7 +92,9 @@ def setup(app): 'sphinx.ext.mathjax', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon' + 'sphinx.ext.napoleon', + 'sphinx_tabs.tabs', + 'sphinx_design' ] # Add any paths that contain templates here, relative to this directory. @@ -105,7 +107,7 @@ def setup(app): source_suffix = '.rst' # The master toctree document. -master_doc = 'index' +root_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -130,20 +132,25 @@ def setup(app): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
# -html_theme = 'sphinx_rtd_theme' -html_title = 'AI Model Efficiency Toolkit Documentation: ver ' + version -html_logo = 'images/brain_logo.png' +html_theme = 'piccolo_theme' +html_title = 'AI Model Efficiency Toolkit Documentation version ' + version +html_short_title = 'AIMET Docs v. ' + version +# html_logo = 'images/brain_logo.png' +# html_favicon = 'images/brain_logo16.png' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { + 'banner_hiding': "temporary", + 'banner_text': "", 'display_version': True, - 'prev_next_buttons_location': 'both', - 'collapse_navigation': False, - 'sticky_navigation': True, - 'navigation_depth': 10 + 'source_url': "https://github.com/quic/aimet", + 'localtoc_maxdepth': 2, + 'globaltoc_includehidden': True, + 'globaltoc_maxdepth': 2, + 'globaltoc_collapse': False } # Add any paths that contain custom static files (such as style sheets) here, @@ -160,8 +167,7 @@ def setup(app): # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # -html_sidebars = {'**': ['localtoc.html', 'relations.html', 'searchbox.html']} - +html_sidebars = {'**': ['globaltoc.html', 'localtoc.html', 'searchbox.html'] } # -- Options for HTMLHelp output --------------------------------------------- @@ -200,7 +206,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'AIMET.tex', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'AIMET.tex', 'AI Model Efficiency Toolkit Documentation', 'Qualcomm Innovation Center, Inc.', 'manual'), ] @@ -210,7 +216,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'aimet', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'aimet', 'AI Model Efficiency Toolkit Documentation', [author], 1) ] @@ -221,7 +227,7 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'AIMET', 'AI Model Efficiency Toolkit Documentation', + (root_doc, 'AIMET', 'AI Model Efficiency Toolkit Documentation', author, 'AIMET', 'One line description of project.', 'Miscellaneous'), ] @@ -233,6 +239,16 @@ def setup(app): nbsphinx_allow_errors = True nbsphinx_execute = 'never' +docs_root_url = "https://quic.github.io/aimet-pages/releases/latest/" + +# Version here refers to the AIMET torch v1/v2 version, not the AIMET release number +html_context = { + 'current_version' : "Universal", + 'versions' : [["Universal", docs_root_url + "features/index.html"], + ["PyTorch", docs_root_url + "torch_v2/torch_docs/index.html"]], + 'display_version_tab': False +} + autosummary_generate = False # contains a list of modules to be mocked up which are not available during docs build time diff --git a/Docs/beta/examples/index.rst b/Docs/beta/examples/index.rst new file mode 100644 index 00000000000..5da28a64261 --- /dev/null +++ b/Docs/beta/examples/index.rst @@ -0,0 +1,5 @@ +.. _examples-index: + +######### +Examples +######### \ No newline at end of file diff --git a/Docs/beta/featureguide/adaround.rst b/Docs/beta/featureguide/adaround.rst new file mode 100644 index 00000000000..a7c4e10c9e3 --- /dev/null +++ b/Docs/beta/featureguide/adaround.rst @@ -0,0 +1,187 @@ +.. _featureguide-adaround: + +################# +Adaptive rounding +################# + +Context +======= + +By default, AIMET uses *nearest rounding* for quantization, in which weight values are quantized to the nearest integer value. + +AIMET adaptive rounding (AdaRound) uses training data to choose how to round quantized weights, improving the quantized model's accuracy in many cases. 
+ +The following figure illustrates how AdaRound might change the rounding of a quantized value. + +.. image:: ../images/adaround.png + :width: 600px + +See the :doc:`Optimization User Guide <../optimization/index>` for a discussion of the recommended sequence of all quantization techniques. + +Complementary techniques +------------------------ + +We recommend using AdaRound in combination with these other techniques: + +- After batch norm folding (BNF) and cross layer equalization (CLE). Applying these techniques first might improve the accuracy gained using AdaRound. +- Before quantization aware training (QAT). For some models applying BNF and CLE may not help. For these models, applying AdaRound before QAT might help. AdaRound is a better weights initialization step that speeds up QAT. + +Conversely, we recommend that you *do not* apply bias correction (BC) before or after using AdaRound. + +Hyper parameters +---------------- + +A number of hyper parameters used during AdaRound optimization are exposed in the API. The default values of some of these parameters tend to lead to stable results and we recommend that you not change them. + +Use the following guideline for adjusting hyper parameters with AdaRound. + +Hyper Parameters to be changed at will + - Number of batches. AdaRound should see 500-1000 images. Loader batch size times number of batches gives the number of images. For example if the data loader batch size is 64, set 16 batches to yield 1024 images. + - Number of iterations. Default is 10,000. + +Hyper Parameters to change with caution + Regularization parameter. Default is 0.01. + +Hyper Parameters to avoid changing + - Beta range. Leave the value at the default of (20, 2). + - Warm start period. Leave at the default value, 20%. 
+ +Workflow +======== + +Prerequisites +------------- + +To use AdaRound, you must: + +- Load a trained model +- Create a training or validation dataloader for the model + +Workflow +-------- + +Step 1 +~~~~~~ + +Prepare the model for quantization. + +.. tabs:: + + .. tab:: PyTorch + + .. container:: tab-heading + + Prepare the model for quantization + + AIMET quantization simulation (QuantSim) for PyTorch requires the user's model definition to follow certain guidelines. For example, functionals defined in forward pass should be changed to an equivalent + **torch.nn.Module**. For a list of these guidelines, see the :ref:`Optimization Guide `. + + Use the :ref:`AIMET ModelPreparer API ` graph transformation feature to automate the model definition changes required to comply with the QuantSim guidelines. + + .. literalinclude:: ../snippets/torch/prepare_model.py + :language: python + + For details of the model preparer API see the + :ref:`Model Preparer API `. + + .. tab:: TensorFlow + + Tensorflow has no preparation requirements. + + .. tab:: ONNX + + ONNX has no preparation requirements. + + +Step 2 +~~~~~~ + +Apply AdaRound to the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/apply_adaround.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/apply_adaround.py + :language: python + + .. tab:: ONNX + + .. literalinclude:: ../snippets/onnx/apply_adaround.py + :language: python + +Step 3 +~~~~~~ + +Evaluate the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/evaluate.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/evaluate.py + :language: python + + .. tab:: ONNX + + .. literalinclude:: ../snippets/onnx/evaluate.py + :language: python + + +Results +------- + +AdaRound should result in improved accuracy, but does not guaranteed sufficient improvement. 
+ + +Next steps +---------- + +If AdaRound resulted in satisfactory accuracy, export the model. + +.. tabs:: + + .. tab:: PyTorch + + .. literalinclude:: ../snippets/torch/export.py + :language: python + + .. tab:: TensorFlow + + .. literalinclude:: ../snippets/tensorflow/export.py + :language: python + + .. tab:: ONNX + + .. literalinclude:: ../snippets/onnx/export.py + :language: python + +If the model is still not accurate enough, the next step is typically to try :ref:`quantization-aware training `. + + +API +=== + +.. tabs:: + + .. tab:: PyTorch + + .. include:: ../apiref/torch/adaround.rst + + .. tab:: TensorFlow + + .. include:: ../apiref/tensorflow/adaround.rst + + .. tab:: ONNX + + .. include:: ../apiref/onnx/adaround.rst diff --git a/Docs/beta/featureguide/autoquant.rst b/Docs/beta/featureguide/autoquant.rst new file mode 100644 index 00000000000..5bf12f60e6f --- /dev/null +++ b/Docs/beta/featureguide/autoquant.rst @@ -0,0 +1,47 @@ +.. _featureguide-autoquant: + +###################### +Automatic quantization +###################### + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/bn.rst b/Docs/beta/featureguide/bn.rst new file mode 100644 index 00000000000..73a9ac223e5 --- --- /dev/null +++ b/Docs/beta/featureguide/bn.rst @@ -0,0 +1,47 @@ +.. 
_featureguide-bn: + +######################## +Batch norm re-estimation +######################## + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/cle.rst b/Docs/beta/featureguide/cle.rst new file mode 100644 index 00000000000..44db666ed0d --- /dev/null +++ b/Docs/beta/featureguide/cle.rst @@ -0,0 +1,47 @@ +.. _featureguide-cle: + +######################## +Cross-layer equalization +######################## + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/cp.rst b/Docs/beta/featureguide/cp.rst new file mode 100644 index 00000000000..5f011a42998 --- /dev/null +++ b/Docs/beta/featureguide/cp.rst @@ -0,0 +1,47 @@ +.. _featureguide-cp: + +################# +Channel pruning +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. 
tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/index.rst b/Docs/beta/featureguide/index.rst new file mode 100644 index 00000000000..4e9fe4ce6bc --- /dev/null +++ b/Docs/beta/featureguide/index.rst @@ -0,0 +1,69 @@ +.. _featureguide--index: + +####################### +Optimization Techniques +####################### + +.. toctree:: + :hidden: + + Quantization aware training + Automatic quantization + Adaptive rounding + Cross-layer equalization + Batch norm re-estimation + Quantization analyzer + Visualization + Weight SVD + Spatial SVD + Channel pruning + +:ref:`Quantization aware training (QAT) ` +====================================================== + +Fine-tunes the model parameters in the presence of quantization noise. + +:ref:`Automatic quantization (AutoQuant) ` +============================================================= + +Analyzes the model, determines the best sequence of AIMET post-training quantization techniques, and applies these techniques. + +:ref:`Adaptive rounding (Adaround) ` +====================================================== + +Uses training data to improve accuracy over naïve rounding. + +:ref:`Cross-layer equalization (CLE) ` +=================================================== + +Scales the parameter ranges across different channels to increase the range for layers with low range and reduce range for layers with high range, enabling the same quantization parameters to be used across all channels. 
+ +:ref:`Batch norm re-estimation (BN) ` +================================================= + +Re-estimated statistics are used to adjust the quantization scale parameters of preceding Convolution or Linear layers, effectively folding the BN layers. + +:ref:`Quantization analyzer (QuantAnalyzer) ` +==================================================================== + +Automatically identify sensitive areas and hotspots in the model. + +:ref:`Visualization ` +============================================ + +Automatically identify sensitive areas and hotspots in the model. + +:ref:`Weight singular value decomposition (Weight SVD) ` +============================================================================ + +Decomposes one large MAC or memory layer into two smaller layers. + +:ref:`Spatial singular value decomposition (Spatial SVD) ` +=============================================================================== + +Decomposes one large convolution (Conv) MAC or memory layer into two smaller layers. + +:ref:`Channel pruning (CP) ` +======================================== + +Removes less-important input channels from 2D convolution layers. diff --git a/Docs/beta/featureguide/qat.rst b/Docs/beta/featureguide/qat.rst new file mode 100644 index 00000000000..b96f31726ae --- /dev/null +++ b/Docs/beta/featureguide/qat.rst @@ -0,0 +1,47 @@ +.. _featureguide-qat: + +########################### +Quantization aware training +########################### + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. 
literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/quant_analyzer.rst b/Docs/beta/featureguide/quant_analyzer.rst new file mode 100644 index 00000000000..be957cdd45a --- /dev/null +++ b/Docs/beta/featureguide/quant_analyzer.rst @@ -0,0 +1,47 @@ +.. _featureguide-quant-analyzer: + +################# +Quantization analyzer +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/quant_sim.rst b/Docs/beta/featureguide/quant_sim.rst new file mode 100644 index 00000000000..c5dc7840b47 --- /dev/null +++ b/Docs/beta/featureguide/quant_sim.rst @@ -0,0 +1,47 @@ +.. _featureguide-quant-sim: + +################# +Quantization simulation +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. 
literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/spatial_svd.rst b/Docs/beta/featureguide/spatial_svd.rst new file mode 100644 index 00000000000..d1745483361 --- /dev/null +++ b/Docs/beta/featureguide/spatial_svd.rst @@ -0,0 +1,47 @@ +.. _featureguide-spatial-svd: + +################# +Spatial SVD +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/visualization.rst b/Docs/beta/featureguide/visualization.rst new file mode 100644 index 00000000000..cd61608e9fa --- /dev/null +++ b/Docs/beta/featureguide/visualization.rst @@ -0,0 +1,47 @@ +.. _featureguide-visualization: + +################# +Visualization +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. 
literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/featureguide/weight_svd.rst b/Docs/beta/featureguide/weight_svd.rst new file mode 100644 index 00000000000..99a4e2dd3ca --- /dev/null +++ b/Docs/beta/featureguide/weight_svd.rst @@ -0,0 +1,47 @@ +.. _featureguide-weight-svd: + +################# +Weight SVD +################# + +Context +======= + +Prerequisites +============= + +Workflow +======== + +.. tabs:: + + .. tab:: PyTorch + + PyTorch code example. + + .. literalinclude:: ../torch_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: TensorFlow + + Keras code example. + + .. literalinclude:: ../keras_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + .. tab:: ONNX + + ONNX code example. + + .. literalinclude:: ../onnx_code_examples/adaround.py + :language: python + :pyobject: apply_adaround_example + + +Results +======= + +Next steps +========== diff --git a/Docs/beta/images/adaround.png b/Docs/beta/images/adaround.png new file mode 100644 index 00000000000..ccb29564a85 Binary files /dev/null and b/Docs/beta/images/adaround.png differ diff --git a/Docs/beta/index.rst b/Docs/beta/index.rst index 3716593b4f6..d4e3f4832d8 100644 --- a/Docs/beta/index.rst +++ b/Docs/beta/index.rst @@ -1,3 +1,72 @@ -============ -Hello World! -============ +.. _top-index: + +###################################### +AI Model Efficiency Toolkit Documentation +###################################### + +.. 
toctree:: + :hidden: + :includehidden: + + Quick Start <../install/quick-start> + Installation <../install/index> + AIMET Optimization Guide <../opt-guide/index> + Quantization Simulation Guide <../quantsim/index> + AIMET Feature Guide <../featureguide/index> + Examples <../examples/index> + API Reference <../apiref/index> + Release Notes <../rn/index> + +AI Model Efficiency Toolkit (AIMET) is a software toolkit for quantizing and compressing models. + +The goal of optimizing a model is to enable its use on an edge device such as a mobile phone or laptop. + +AIMET uses post-training and fine-tuning techniques to optimize trained models in ways that minimize accuracy loss incurred during quantization or compression. + +AIMET supports PyTorch, TensorFlow, and Keras models, and ONNX models with limited functionality. + +Quick Start +=========== + +To install and get started as quickly as possible using AIMET with PyTorch, see the :doc:`Quick Start guide <../install/quick-start>`. + +Installation +============= + +For other install options, including for TensorFlow and ONNX platforms or to run AIMET in a Docker container, see :doc:`Installation <../install/index>`. + +Optimization Guide +================== + +For a high-level explanation of how to use AIMET to optimize a model, see the :doc:`Optimization User Guide <../opt-guide/index>`. + +Feature Guide +============= + +For instructions on applying individual AIMET features, see the :doc:`Features User Guide <../featureguide/index>`. + +Quantization Simulation Guide +============================= + +Quantization simulation (QuantSim) provides an approximation of a quantized model by inserting quantization operations in a trained model. QuantSim enables application of optimization techniques to a model and testing of the resulting model before the model is exported. 
+ +Examples +======== + +To view end-to-end examples of model quantization and compression, and to download the examples in Jupyter notebook format, see :doc:`Examples <../examples/index>`. + +API Reference +============= + +For a detailed look at the AIMET API, see the :doc:`API Reference <../apiref/index>`. + +Release Notes +============= + +For information specific to this release, see :doc:`Release Notes <../rn/index>`. + + +| |project| is a product of |author| +| Qualcomm\ |reg| Neural Processing SDK is a product of Qualcomm Technologies, Inc. and/or its subsidiaries. + +.. |reg| unicode:: U+000AE .. REGISTERED SIGN diff --git a/Docs/beta/install/index.rst b/Docs/beta/install/index.rst new file mode 100644 index 00000000000..a23bb147a59 --- /dev/null +++ b/Docs/beta/install/index.rst @@ -0,0 +1,5 @@ +.. _install-index: + +############ +Installation +############ \ No newline at end of file diff --git a/Docs/beta/install/quick-start.rst b/Docs/beta/install/quick-start.rst new file mode 100644 index 00000000000..f6a5cd22bfd --- /dev/null +++ b/Docs/beta/install/quick-start.rst @@ -0,0 +1,5 @@ +.. _install-quick-start: + +########### +Quick Start +########### \ No newline at end of file diff --git a/Docs/beta/opt-guide/compression/index.rst b/Docs/beta/opt-guide/compression/index.rst new file mode 100644 index 00000000000..ababf1736eb --- /dev/null +++ b/Docs/beta/opt-guide/compression/index.rst @@ -0,0 +1,5 @@ +.. _opt-guide-compression: + +################# +Model Compression +################# diff --git a/Docs/beta/opt-guide/index.rst b/Docs/beta/opt-guide/index.rst new file mode 100644 index 00000000000..67471a25bb1 --- /dev/null +++ b/Docs/beta/opt-guide/index.rst @@ -0,0 +1,21 @@ +.. _opt-guide-index: + +################## +Model Optimization +################## + +.. toctree:: + :hidden: + :maxdepth: 1 + + Overview + Quantization + Compression + +This user guide is organized into the following sections: + +:ref:`Overview ` is a general discussion of how AIMET optimizes models. 
+ +:ref:`Quantization ` describes how AIMET applies quantization techniques. + +:ref:`Compression ` describes how AIMET applies compression techniques. \ No newline at end of file diff --git a/Docs/beta/opt-guide/overview.rst b/Docs/beta/opt-guide/overview.rst new file mode 100644 index 00000000000..b0e676a1570 --- /dev/null +++ b/Docs/beta/opt-guide/overview.rst @@ -0,0 +1,5 @@ +.. _opt-guide-overview: + +######### +Optimization overview +######### \ No newline at end of file diff --git a/Docs/beta/opt-guide/overview/index.rst b/Docs/beta/opt-guide/overview/index.rst new file mode 100644 index 00000000000..179bd1d83a8 --- /dev/null +++ b/Docs/beta/opt-guide/overview/index.rst @@ -0,0 +1,5 @@ +.. _opt-guide-overview: + +############################### +Optimization technical overview +############################### diff --git a/Docs/beta/opt-guide/quantization/index.rst b/Docs/beta/opt-guide/quantization/index.rst new file mode 100644 index 00000000000..d334459780d --- /dev/null +++ b/Docs/beta/opt-guide/quantization/index.rst @@ -0,0 +1,5 @@ +.. _opt-guide-quantization: + +######### +Model Quantization +######### diff --git a/Docs/beta/quantsim/index.rst b/Docs/beta/quantsim/index.rst new file mode 100644 index 00000000000..59b3b817db8 --- /dev/null +++ b/Docs/beta/quantsim/index.rst @@ -0,0 +1,5 @@ +.. _quantsim-index: + +############################# +Quantization Simulation Guide +############################# \ No newline at end of file diff --git a/Docs/beta/rn/index.rst b/Docs/beta/rn/index.rst new file mode 100644 index 00000000000..899618b8012 --- /dev/null +++ b/Docs/beta/rn/index.rst @@ -0,0 +1,5 @@ +.. 
_rn-index: + +############# +Release Notes +############# \ No newline at end of file diff --git a/Docs/beta/snippets/torch/apply_adaround.py b/Docs/beta/snippets/torch/apply_adaround.py new file mode 100644 index 00000000000..c09013436c5 --- /dev/null +++ b/Docs/beta/snippets/torch/apply_adaround.py @@ -0,0 +1,22 @@ +# Apply AdaRound +from aimet_common.defs import QuantScheme +from aimet_torch.v1.quantsim import QuantizationSimModel +from aimet_torch.v1.adaround.adaround_weight import Adaround, AdaroundParameters + +params = AdaroundParameters(data_loader=data_loader, num_batches=4, default_num_iterations=32, + default_reg_param=0.01, default_beta_range=(20, 2)) + +input_shape = <input shape> +dummy_input = torch.randn(input_shape) + +# Returns model with adarounded weights and their corresponding encodings +adarounded_model = Adaround.apply_adaround(<model>, dummy_input, params, path='./', + filename_prefix='<filename prefix>', default_param_bw=<bitwidth>, + default_quant_scheme=<quant scheme>, + default_config_file=None) + +# where +# <model> is the prepared PyTorch model +# <filename prefix> is user-defined +# <bitwidth> is the bit width to use +# <quant scheme> is a selected AIMET quantization scheme diff --git a/Docs/beta/snippets/torch/create_quantizationsimmodel.py b/Docs/beta/snippets/torch/create_quantizationsimmodel.py new file mode 100644 index 00000000000..a400476649f --- /dev/null +++ b/Docs/beta/snippets/torch/create_quantizationsimmodel.py @@ -0,0 +1,13 @@ +from aimet_common.defs import QuantScheme +from aimet_torch.v1.quantsim import QuantizationSimModel +from aimet_torch.v1.adaround.adaround_weight import Adaround, AdaroundParameters + +# Create Quantization Simulation using an adarounded_model
+sim = QuantizationSimModel(<adarounded model>, quant_scheme=<quant scheme>, default_param_bw=<param bitwidth>, + default_output_bw=<output bitwidth>, dummy_input=<dummy input>) + +# where +# <adarounded model> is a model to which AIMET AdaRound has been applied +# <quant scheme> is a selected AIMET quantization scheme +# <param bitwidth> and <output bitwidth> are the bit widths of the quantized model +# <dummy input> is any data that conforms to the model input shape. It is not used. 
\ No newline at end of file diff --git a/Docs/beta/snippets/torch/evaluate.py b/Docs/beta/snippets/torch/evaluate.py new file mode 100644 index 00000000000..55d8923aa28 --- /dev/null +++ b/Docs/beta/snippets/torch/evaluate.py @@ -0,0 +1,3 @@ + # Determine simulated accuracy + accuracy = ImageNetDataPipeline.evaluate(sim.model, use_cuda) + print(accuracy) \ No newline at end of file diff --git a/Docs/beta/snippets/torch/export.py b/Docs/beta/snippets/torch/export.py new file mode 100644 index 00000000000..e36a1a3a3cb --- /dev/null +++ b/Docs/beta/snippets/torch/export.py @@ -0,0 +1,4 @@ + # Export the model + # Export the model which saves pytorch model without any simulation nodes and saves encodings file for both + # activations and parameters in JSON format + model.export(path='./', filename_prefix='', dummy_input=dummy_input.cpu()) diff --git a/Docs/beta/snippets/torch/pass_calibration_data.py b/Docs/beta/snippets/torch/pass_calibration_data.py new file mode 100644 index 00000000000..cb998a2c7f6 --- /dev/null +++ b/Docs/beta/snippets/torch/pass_calibration_data.py @@ -0,0 +1,35 @@ +# PyTorch imports + +import torch +import torch.cuda + +# End of PyTorch imports + +def pass_calibration_data(sim_model): + """ + The User of the QuantizationSimModel API is expected to write this function based on their data set. + This is not a working function and is provided only as a guideline. + + :param sim_model: + :return: + """ + + # User action required + # For computing the activation encodings, around 1000 unlabelled data samples are required. 
+ # Edit the following 2 lines based on your batch size so that + # batch_size * max_batch_counter = 1024 + batch_size = 64 + max_batch_counter = 16 + + sim_model.eval() + + current_batch_counter = 0 + with torch.no_grad(): + for input_data, target_data in data_loader: + + inputs_batch = input_data # labels are ignored + sim_model(inputs_batch) + + current_batch_counter += 1 + if current_batch_counter == max_batch_counter: + break diff --git a/Docs/beta/snippets/torch/prepare_model.py b/Docs/beta/snippets/torch/prepare_model.py new file mode 100644 index 00000000000..6c0d6621622 --- /dev/null +++ b/Docs/beta/snippets/torch/prepare_model.py @@ -0,0 +1,5 @@ +# Prepare the model +from aimet_torch.model_preparer import prepare_model +prepared_model = prepare_model(<model>) + +# where <model> is a torch.nn.Module \ No newline at end of file