diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/README.md b/README.md deleted file mode 100644 index b1424e7..0000000 --- a/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# 🤖🌊 aiFlows - -The library, together with tutorials (implementing ReAct, AutoGPT, and many more), will be released in the next few weeks. - -## Do you want to get a notification when the library is released? 🙋‍♀️ 🙋‍♂️ - -Click on the watch icon in the top right corner and subscribe to receive notifications for "All Activity." - - -
- -The library is based on the conceptual framework _Flows_ proposed in [Flows: Building Blocks of Reasoning and Collaborating AI](https://arxiv.org/abs/2308.01285) diff --git a/asset-manifest.json b/asset-manifest.json new file mode 100644 index 0000000..a66738a --- /dev/null +++ b/asset-manifest.json @@ -0,0 +1,13 @@ +{ + "files": { + "main.css": "./static/css/main.94a91037.css", + "main.js": "./static/js/main.710163c6.js", + "index.html": "./index.html", + "main.94a91037.css.map": "./static/css/main.94a91037.css.map", + "main.710163c6.js.map": "./static/js/main.710163c6.js.map" + }, + "entrypoints": [ + "static/css/main.94a91037.css", + "static/js/main.710163c6.js" + ] +} \ No newline at end of file diff --git a/assets/flows_logo_header.png b/assets/flows_logo_header.png new file mode 100644 index 0000000..c68d360 Binary files /dev/null and b/assets/flows_logo_header.png differ diff --git a/assets/flows_logo_nav.png b/assets/flows_logo_nav.png new file mode 100644 index 0000000..102dee8 Binary files /dev/null and b/assets/flows_logo_nav.png differ diff --git a/assets/flows_logo_round.png b/assets/flows_logo_round.png new file mode 100644 index 0000000..4d38276 Binary files /dev/null and b/assets/flows_logo_round.png differ diff --git a/css/bootstrap.css b/css/bootstrap.css new file mode 100644 index 0000000..6167622 --- /dev/null +++ b/css/bootstrap.css @@ -0,0 +1,6757 @@ +/*! + * Bootstrap v3.3.7 (http://getbootstrap.com) + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + */ +/*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */ +html { + font-family: sans-serif; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; +} +body { + margin: 0; +} +article, +aside, +details, +figcaption, +figure, +footer, +header, +hgroup, +main, +menu, +nav, +section, +summary { + display: block; +} +audio, +canvas, +progress, +video { + display: inline-block; + vertical-align: baseline; +} +audio:not([controls]) { + display: none; + height: 0; +} +[hidden], +template { + display: none; +} +a { + background-color: transparent; +} +a:active, +a:hover { + outline: 0; +} +abbr[title] { + border-bottom: 1px dotted; +} +b, +strong { + font-weight: bold; +} +dfn { + font-style: italic; +} +h1 { + margin: .67em 0; + font-size: 2em; +} +mark { + color: #000; + background: #ff0; +} +small { + font-size: 80%; +} +sub, +sup { + position: relative; + font-size: 75%; + line-height: 0; + vertical-align: baseline; +} +sup { + top: -.5em; +} +sub { + bottom: -.25em; +} +img { + border: 0; +} +svg:not(:root) { + overflow: hidden; +} +figure { + margin: 1em 40px; +} +hr { + height: 0; + -webkit-box-sizing: content-box; + -moz-box-sizing: content-box; + box-sizing: content-box; +} +pre { + overflow: auto; +} +code, +kbd, +pre, +samp { + font-family: monospace, monospace; + font-size: 1em; +} +button, +input, +optgroup, +select, +textarea { + margin: 0; + font: inherit; + color: inherit; +} +button { + overflow: visible; +} +button, +select { + text-transform: none; +} +button, +html input[type="button"], +input[type="reset"], +input[type="submit"] { + -webkit-appearance: button; + cursor: pointer; +} +button[disabled], +html input[disabled] { + cursor: default; +} +button::-moz-focus-inner, +input::-moz-focus-inner { + padding: 0; + border: 0; +} +input { + line-height: normal; +} +input[type="checkbox"], +input[type="radio"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + padding: 0; +} 
+input[type="number"]::-webkit-inner-spin-button, +input[type="number"]::-webkit-outer-spin-button { + height: auto; +} +input[type="search"] { + -webkit-box-sizing: content-box; + -moz-box-sizing: content-box; + box-sizing: content-box; + -webkit-appearance: textfield; +} +input[type="search"]::-webkit-search-cancel-button, +input[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} +fieldset { + padding: .35em .625em .75em; + margin: 0 2px; + border: 1px solid #c0c0c0; +} +legend { + padding: 0; + border: 0; +} +textarea { + overflow: auto; +} +optgroup { + font-weight: bold; +} +table { + border-spacing: 0; + border-collapse: collapse; +} +td, +th { + padding: 0; +} +/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */ +@media print { + *, + *:before, + *:after { + color: #000 !important; + text-shadow: none !important; + background: transparent !important; + -webkit-box-shadow: none !important; + box-shadow: none !important; + } + a, + a:visited { + text-decoration: underline; + } + a[href]:after { + content: " (" attr(href) ")"; + } + abbr[title]:after { + content: " (" attr(title) ")"; + } + a[href^="#"]:after, + a[href^="javascript:"]:after { + content: ""; + } + pre, + blockquote { + border: 1px solid #999; + + page-break-inside: avoid; + } + thead { + display: table-header-group; + } + tr, + img { + page-break-inside: avoid; + } + img { + max-width: 100% !important; + } + p, + h2, + h3 { + orphans: 3; + widows: 3; + } + h2, + h3 { + page-break-after: avoid; + } + .navbar { + display: none; + } + .btn > .caret, + .dropup > .btn > .caret { + border-top-color: #000 !important; + } + .label { + border: 1px solid #000; + } + .table { + border-collapse: collapse !important; + } + .table td, + .table th { + background-color: #fff !important; + } + .table-bordered th, + .table-bordered td { + border: 1px solid #ddd !important; + } +} +@font-face { + font-family: 'Glyphicons Halflings'; + + src: 
url('../fonts/glyphicons-halflings-regular.eot'); + src: url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'), url('../fonts/glyphicons-halflings-regular.woff2') format('woff2'), url('../fonts/glyphicons-halflings-regular.woff') format('woff'), url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'), url('../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular') format('svg'); +} +.glyphicon { + position: relative; + top: 1px; + display: inline-block; + font-family: 'Glyphicons Halflings'; + font-style: normal; + font-weight: normal; + line-height: 1; + + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} +.glyphicon-asterisk:before { + content: "\002a"; +} +.glyphicon-plus:before { + content: "\002b"; +} +.glyphicon-euro:before, +.glyphicon-eur:before { + content: "\20ac"; +} +.glyphicon-minus:before { + content: "\2212"; +} +.glyphicon-cloud:before { + content: "\2601"; +} +.glyphicon-envelope:before { + content: "\2709"; +} +.glyphicon-pencil:before { + content: "\270f"; +} +.glyphicon-glass:before { + content: "\e001"; +} +.glyphicon-music:before { + content: "\e002"; +} +.glyphicon-search:before { + content: "\e003"; +} +.glyphicon-heart:before { + content: "\e005"; +} +.glyphicon-star:before { + content: "\e006"; +} +.glyphicon-star-empty:before { + content: "\e007"; +} +.glyphicon-user:before { + content: "\e008"; +} +.glyphicon-film:before { + content: "\e009"; +} +.glyphicon-th-large:before { + content: "\e010"; +} +.glyphicon-th:before { + content: "\e011"; +} +.glyphicon-th-list:before { + content: "\e012"; +} +.glyphicon-ok:before { + content: "\e013"; +} +.glyphicon-remove:before { + content: "\e014"; +} +.glyphicon-zoom-in:before { + content: "\e015"; +} +.glyphicon-zoom-out:before { + content: "\e016"; +} +.glyphicon-off:before { + content: "\e017"; +} +.glyphicon-signal:before { + content: "\e018"; +} +.glyphicon-cog:before { + content: "\e019"; +} 
+.glyphicon-trash:before { + content: "\e020"; +} +.glyphicon-home:before { + content: "\e021"; +} +.glyphicon-file:before { + content: "\e022"; +} +.glyphicon-time:before { + content: "\e023"; +} +.glyphicon-road:before { + content: "\e024"; +} +.glyphicon-download-alt:before { + content: "\e025"; +} +.glyphicon-download:before { + content: "\e026"; +} +.glyphicon-upload:before { + content: "\e027"; +} +.glyphicon-inbox:before { + content: "\e028"; +} +.glyphicon-play-circle:before { + content: "\e029"; +} +.glyphicon-repeat:before { + content: "\e030"; +} +.glyphicon-refresh:before { + content: "\e031"; +} +.glyphicon-list-alt:before { + content: "\e032"; +} +.glyphicon-lock:before { + content: "\e033"; +} +.glyphicon-flag:before { + content: "\e034"; +} +.glyphicon-headphones:before { + content: "\e035"; +} +.glyphicon-volume-off:before { + content: "\e036"; +} +.glyphicon-volume-down:before { + content: "\e037"; +} +.glyphicon-volume-up:before { + content: "\e038"; +} +.glyphicon-qrcode:before { + content: "\e039"; +} +.glyphicon-barcode:before { + content: "\e040"; +} +.glyphicon-tag:before { + content: "\e041"; +} +.glyphicon-tags:before { + content: "\e042"; +} +.glyphicon-book:before { + content: "\e043"; +} +.glyphicon-bookmark:before { + content: "\e044"; +} +.glyphicon-print:before { + content: "\e045"; +} +.glyphicon-camera:before { + content: "\e046"; +} +.glyphicon-font:before { + content: "\e047"; +} +.glyphicon-bold:before { + content: "\e048"; +} +.glyphicon-italic:before { + content: "\e049"; +} +.glyphicon-text-height:before { + content: "\e050"; +} +.glyphicon-text-width:before { + content: "\e051"; +} +.glyphicon-align-left:before { + content: "\e052"; +} +.glyphicon-align-center:before { + content: "\e053"; +} +.glyphicon-align-right:before { + content: "\e054"; +} +.glyphicon-align-justify:before { + content: "\e055"; +} +.glyphicon-list:before { + content: "\e056"; +} +.glyphicon-indent-left:before { + content: "\e057"; +} 
+.glyphicon-indent-right:before { + content: "\e058"; +} +.glyphicon-facetime-video:before { + content: "\e059"; +} +.glyphicon-picture:before { + content: "\e060"; +} +.glyphicon-map-marker:before { + content: "\e062"; +} +.glyphicon-adjust:before { + content: "\e063"; +} +.glyphicon-tint:before { + content: "\e064"; +} +.glyphicon-edit:before { + content: "\e065"; +} +.glyphicon-share:before { + content: "\e066"; +} +.glyphicon-check:before { + content: "\e067"; +} +.glyphicon-move:before { + content: "\e068"; +} +.glyphicon-step-backward:before { + content: "\e069"; +} +.glyphicon-fast-backward:before { + content: "\e070"; +} +.glyphicon-backward:before { + content: "\e071"; +} +.glyphicon-play:before { + content: "\e072"; +} +.glyphicon-pause:before { + content: "\e073"; +} +.glyphicon-stop:before { + content: "\e074"; +} +.glyphicon-forward:before { + content: "\e075"; +} +.glyphicon-fast-forward:before { + content: "\e076"; +} +.glyphicon-step-forward:before { + content: "\e077"; +} +.glyphicon-eject:before { + content: "\e078"; +} +.glyphicon-chevron-left:before { + content: "\e079"; +} +.glyphicon-chevron-right:before { + content: "\e080"; +} +.glyphicon-plus-sign:before { + content: "\e081"; +} +.glyphicon-minus-sign:before { + content: "\e082"; +} +.glyphicon-remove-sign:before { + content: "\e083"; +} +.glyphicon-ok-sign:before { + content: "\e084"; +} +.glyphicon-question-sign:before { + content: "\e085"; +} +.glyphicon-info-sign:before { + content: "\e086"; +} +.glyphicon-screenshot:before { + content: "\e087"; +} +.glyphicon-remove-circle:before { + content: "\e088"; +} +.glyphicon-ok-circle:before { + content: "\e089"; +} +.glyphicon-ban-circle:before { + content: "\e090"; +} +.glyphicon-arrow-left:before { + content: "\e091"; +} +.glyphicon-arrow-right:before { + content: "\e092"; +} +.glyphicon-arrow-up:before { + content: "\e093"; +} +.glyphicon-arrow-down:before { + content: "\e094"; +} +.glyphicon-share-alt:before { + content: "\e095"; +} 
+.glyphicon-resize-full:before { + content: "\e096"; +} +.glyphicon-resize-small:before { + content: "\e097"; +} +.glyphicon-exclamation-sign:before { + content: "\e101"; +} +.glyphicon-gift:before { + content: "\e102"; +} +.glyphicon-leaf:before { + content: "\e103"; +} +.glyphicon-fire:before { + content: "\e104"; +} +.glyphicon-eye-open:before { + content: "\e105"; +} +.glyphicon-eye-close:before { + content: "\e106"; +} +.glyphicon-warning-sign:before { + content: "\e107"; +} +.glyphicon-plane:before { + content: "\e108"; +} +.glyphicon-calendar:before { + content: "\e109"; +} +.glyphicon-random:before { + content: "\e110"; +} +.glyphicon-comment:before { + content: "\e111"; +} +.glyphicon-magnet:before { + content: "\e112"; +} +.glyphicon-chevron-up:before { + content: "\e113"; +} +.glyphicon-chevron-down:before { + content: "\e114"; +} +.glyphicon-retweet:before { + content: "\e115"; +} +.glyphicon-shopping-cart:before { + content: "\e116"; +} +.glyphicon-folder-close:before { + content: "\e117"; +} +.glyphicon-folder-open:before { + content: "\e118"; +} +.glyphicon-resize-vertical:before { + content: "\e119"; +} +.glyphicon-resize-horizontal:before { + content: "\e120"; +} +.glyphicon-hdd:before { + content: "\e121"; +} +.glyphicon-bullhorn:before { + content: "\e122"; +} +.glyphicon-bell:before { + content: "\e123"; +} +.glyphicon-certificate:before { + content: "\e124"; +} +.glyphicon-thumbs-up:before { + content: "\e125"; +} +.glyphicon-thumbs-down:before { + content: "\e126"; +} +.glyphicon-hand-right:before { + content: "\e127"; +} +.glyphicon-hand-left:before { + content: "\e128"; +} +.glyphicon-hand-up:before { + content: "\e129"; +} +.glyphicon-hand-down:before { + content: "\e130"; +} +.glyphicon-circle-arrow-right:before { + content: "\e131"; +} +.glyphicon-circle-arrow-left:before { + content: "\e132"; +} +.glyphicon-circle-arrow-up:before { + content: "\e133"; +} +.glyphicon-circle-arrow-down:before { + content: "\e134"; +} 
+.glyphicon-globe:before { + content: "\e135"; +} +.glyphicon-wrench:before { + content: "\e136"; +} +.glyphicon-tasks:before { + content: "\e137"; +} +.glyphicon-filter:before { + content: "\e138"; +} +.glyphicon-briefcase:before { + content: "\e139"; +} +.glyphicon-fullscreen:before { + content: "\e140"; +} +.glyphicon-dashboard:before { + content: "\e141"; +} +.glyphicon-paperclip:before { + content: "\e142"; +} +.glyphicon-heart-empty:before { + content: "\e143"; +} +.glyphicon-link:before { + content: "\e144"; +} +.glyphicon-phone:before { + content: "\e145"; +} +.glyphicon-pushpin:before { + content: "\e146"; +} +.glyphicon-usd:before { + content: "\e148"; +} +.glyphicon-gbp:before { + content: "\e149"; +} +.glyphicon-sort:before { + content: "\e150"; +} +.glyphicon-sort-by-alphabet:before { + content: "\e151"; +} +.glyphicon-sort-by-alphabet-alt:before { + content: "\e152"; +} +.glyphicon-sort-by-order:before { + content: "\e153"; +} +.glyphicon-sort-by-order-alt:before { + content: "\e154"; +} +.glyphicon-sort-by-attributes:before { + content: "\e155"; +} +.glyphicon-sort-by-attributes-alt:before { + content: "\e156"; +} +.glyphicon-unchecked:before { + content: "\e157"; +} +.glyphicon-expand:before { + content: "\e158"; +} +.glyphicon-collapse-down:before { + content: "\e159"; +} +.glyphicon-collapse-up:before { + content: "\e160"; +} +.glyphicon-log-in:before { + content: "\e161"; +} +.glyphicon-flash:before { + content: "\e162"; +} +.glyphicon-log-out:before { + content: "\e163"; +} +.glyphicon-new-window:before { + content: "\e164"; +} +.glyphicon-record:before { + content: "\e165"; +} +.glyphicon-save:before { + content: "\e166"; +} +.glyphicon-open:before { + content: "\e167"; +} +.glyphicon-saved:before { + content: "\e168"; +} +.glyphicon-import:before { + content: "\e169"; +} +.glyphicon-export:before { + content: "\e170"; +} +.glyphicon-send:before { + content: "\e171"; +} +.glyphicon-floppy-disk:before { + content: "\e172"; +} 
+.glyphicon-floppy-saved:before { + content: "\e173"; +} +.glyphicon-floppy-remove:before { + content: "\e174"; +} +.glyphicon-floppy-save:before { + content: "\e175"; +} +.glyphicon-floppy-open:before { + content: "\e176"; +} +.glyphicon-credit-card:before { + content: "\e177"; +} +.glyphicon-transfer:before { + content: "\e178"; +} +.glyphicon-cutlery:before { + content: "\e179"; +} +.glyphicon-header:before { + content: "\e180"; +} +.glyphicon-compressed:before { + content: "\e181"; +} +.glyphicon-earphone:before { + content: "\e182"; +} +.glyphicon-phone-alt:before { + content: "\e183"; +} +.glyphicon-tower:before { + content: "\e184"; +} +.glyphicon-stats:before { + content: "\e185"; +} +.glyphicon-sd-video:before { + content: "\e186"; +} +.glyphicon-hd-video:before { + content: "\e187"; +} +.glyphicon-subtitles:before { + content: "\e188"; +} +.glyphicon-sound-stereo:before { + content: "\e189"; +} +.glyphicon-sound-dolby:before { + content: "\e190"; +} +.glyphicon-sound-5-1:before { + content: "\e191"; +} +.glyphicon-sound-6-1:before { + content: "\e192"; +} +.glyphicon-sound-7-1:before { + content: "\e193"; +} +.glyphicon-copyright-mark:before { + content: "\e194"; +} +.glyphicon-registration-mark:before { + content: "\e195"; +} +.glyphicon-cloud-download:before { + content: "\e197"; +} +.glyphicon-cloud-upload:before { + content: "\e198"; +} +.glyphicon-tree-conifer:before { + content: "\e199"; +} +.glyphicon-tree-deciduous:before { + content: "\e200"; +} +.glyphicon-cd:before { + content: "\e201"; +} +.glyphicon-save-file:before { + content: "\e202"; +} +.glyphicon-open-file:before { + content: "\e203"; +} +.glyphicon-level-up:before { + content: "\e204"; +} +.glyphicon-copy:before { + content: "\e205"; +} +.glyphicon-paste:before { + content: "\e206"; +} +.glyphicon-alert:before { + content: "\e209"; +} +.glyphicon-equalizer:before { + content: "\e210"; +} +.glyphicon-king:before { + content: "\e211"; +} +.glyphicon-queen:before { + content: "\e212"; +} 
+.glyphicon-pawn:before { + content: "\e213"; +} +.glyphicon-bishop:before { + content: "\e214"; +} +.glyphicon-knight:before { + content: "\e215"; +} +.glyphicon-baby-formula:before { + content: "\e216"; +} +.glyphicon-tent:before { + content: "\26fa"; +} +.glyphicon-blackboard:before { + content: "\e218"; +} +.glyphicon-bed:before { + content: "\e219"; +} +.glyphicon-apple:before { + content: "\f8ff"; +} +.glyphicon-erase:before { + content: "\e221"; +} +.glyphicon-hourglass:before { + content: "\231b"; +} +.glyphicon-lamp:before { + content: "\e223"; +} +.glyphicon-duplicate:before { + content: "\e224"; +} +.glyphicon-piggy-bank:before { + content: "\e225"; +} +.glyphicon-scissors:before { + content: "\e226"; +} +.glyphicon-bitcoin:before { + content: "\e227"; +} +.glyphicon-btc:before { + content: "\e227"; +} +.glyphicon-xbt:before { + content: "\e227"; +} +.glyphicon-yen:before { + content: "\00a5"; +} +.glyphicon-jpy:before { + content: "\00a5"; +} +.glyphicon-ruble:before { + content: "\20bd"; +} +.glyphicon-rub:before { + content: "\20bd"; +} +.glyphicon-scale:before { + content: "\e230"; +} +.glyphicon-ice-lolly:before { + content: "\e231"; +} +.glyphicon-ice-lolly-tasted:before { + content: "\e232"; +} +.glyphicon-education:before { + content: "\e233"; +} +.glyphicon-option-horizontal:before { + content: "\e234"; +} +.glyphicon-option-vertical:before { + content: "\e235"; +} +.glyphicon-menu-hamburger:before { + content: "\e236"; +} +.glyphicon-modal-window:before { + content: "\e237"; +} +.glyphicon-oil:before { + content: "\e238"; +} +.glyphicon-grain:before { + content: "\e239"; +} +.glyphicon-sunglasses:before { + content: "\e240"; +} +.glyphicon-text-size:before { + content: "\e241"; +} +.glyphicon-text-color:before { + content: "\e242"; +} +.glyphicon-text-background:before { + content: "\e243"; +} +.glyphicon-object-align-top:before { + content: "\e244"; +} +.glyphicon-object-align-bottom:before { + content: "\e245"; +} 
+.glyphicon-object-align-horizontal:before { + content: "\e246"; +} +.glyphicon-object-align-left:before { + content: "\e247"; +} +.glyphicon-object-align-vertical:before { + content: "\e248"; +} +.glyphicon-object-align-right:before { + content: "\e249"; +} +.glyphicon-triangle-right:before { + content: "\e250"; +} +.glyphicon-triangle-left:before { + content: "\e251"; +} +.glyphicon-triangle-bottom:before { + content: "\e252"; +} +.glyphicon-triangle-top:before { + content: "\e253"; +} +.glyphicon-console:before { + content: "\e254"; +} +.glyphicon-superscript:before { + content: "\e255"; +} +.glyphicon-subscript:before { + content: "\e256"; +} +.glyphicon-menu-left:before { + content: "\e257"; +} +.glyphicon-menu-right:before { + content: "\e258"; +} +.glyphicon-menu-down:before { + content: "\e259"; +} +.glyphicon-menu-up:before { + content: "\e260"; +} +* { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} +*:before, +*:after { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} +html { + font-size: 10px; + + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); +} +body { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 14px; + line-height: 1.42857143; + color: #333; + background-color: #fff; +} +input, +button, +select, +textarea { + font-family: inherit; + font-size: inherit; + line-height: inherit; +} +a { + color: #337ab7; + text-decoration: none; +} +a:hover, +a:focus { + color: #23527c; + text-decoration: underline; +} +a:focus { + outline: 5px auto -webkit-focus-ring-color; + outline-offset: -2px; +} +figure { + margin: 0; +} +img { + vertical-align: middle; +} +.img-responsive, +.thumbnail > img, +.thumbnail a > img, +.carousel-inner > .item > img, +.carousel-inner > .item > a > img { + display: block; + max-width: 100%; + height: auto; +} +.img-rounded { + border-radius: 6px; +} +.img-thumbnail { + display: inline-block; + max-width: 100%; + 
height: auto; + padding: 4px; + line-height: 1.42857143; + background-color: #fff; + border: 1px solid #ddd; + border-radius: 4px; + -webkit-transition: all .2s ease-in-out; + -o-transition: all .2s ease-in-out; + transition: all .2s ease-in-out; +} +.img-circle { + border-radius: 50%; +} +hr { + margin-top: 20px; + margin-bottom: 20px; + border: 0; + border-top: 1px solid #eee; +} +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + border: 0; +} +.sr-only-focusable:active, +.sr-only-focusable:focus { + position: static; + width: auto; + height: auto; + margin: 0; + overflow: visible; + clip: auto; +} +[role="button"] { + cursor: pointer; +} +h1, +h2, +h3, +h4, +h5, +h6, +.h1, +.h2, +.h3, +.h4, +.h5, +.h6 { + font-family: inherit; + font-weight: 500; + line-height: 1.1; + color: inherit; +} +h1 small, +h2 small, +h3 small, +h4 small, +h5 small, +h6 small, +.h1 small, +.h2 small, +.h3 small, +.h4 small, +.h5 small, +.h6 small, +h1 .small, +h2 .small, +h3 .small, +h4 .small, +h5 .small, +h6 .small, +.h1 .small, +.h2 .small, +.h3 .small, +.h4 .small, +.h5 .small, +.h6 .small { + font-weight: normal; + line-height: 1; + color: #777; +} +h1, +.h1, +h2, +.h2, +h3, +.h3 { + margin-top: 20px; + margin-bottom: 10px; +} +h1 small, +.h1 small, +h2 small, +.h2 small, +h3 small, +.h3 small, +h1 .small, +.h1 .small, +h2 .small, +.h2 .small, +h3 .small, +.h3 .small { + font-size: 65%; +} +h4, +.h4, +h5, +.h5, +h6, +.h6 { + margin-top: 10px; + margin-bottom: 10px; +} +h4 small, +.h4 small, +h5 small, +.h5 small, +h6 small, +.h6 small, +h4 .small, +.h4 .small, +h5 .small, +.h5 .small, +h6 .small, +.h6 .small { + font-size: 75%; +} +h1, +.h1 { + font-size: 36px; +} +h2, +.h2 { + font-size: 30px; +} +h3, +.h3 { + font-size: 24px; +} +h4, +.h4 { + font-size: 18px; +} +h5, +.h5 { + font-size: 14px; +} +h6, +.h6 { + font-size: 12px; +} +p { + margin: 0 0 10px; +} +.lead { + margin-bottom: 20px; + 
font-size: 16px; + font-weight: 300; + line-height: 1.4; +} +@media (min-width: 768px) { + .lead { + font-size: 21px; + } +} +small, +.small { + font-size: 85%; +} +mark, +.mark { + padding: .2em; + background-color: #fcf8e3; +} +.text-left { + text-align: left; +} +.text-right { + text-align: right; +} +.text-center { + text-align: center; +} +.text-justify { + text-align: justify; +} +.text-nowrap { + white-space: nowrap; +} +.text-lowercase { + text-transform: lowercase; +} +.text-uppercase { + text-transform: uppercase; +} +.text-capitalize { + text-transform: capitalize; +} +.text-muted { + color: #777; +} +.text-primary { + color: #337ab7; +} +a.text-primary:hover, +a.text-primary:focus { + color: #286090; +} +.text-success { + color: #3c763d; +} +a.text-success:hover, +a.text-success:focus { + color: #2b542c; +} +.text-info { + color: #31708f; +} +a.text-info:hover, +a.text-info:focus { + color: #245269; +} +.text-warning { + color: #8a6d3b; +} +a.text-warning:hover, +a.text-warning:focus { + color: #66512c; +} +.text-danger { + color: #a94442; +} +a.text-danger:hover, +a.text-danger:focus { + color: #843534; +} +.bg-primary { + color: #fff; + background-color: #337ab7; +} +a.bg-primary:hover, +a.bg-primary:focus { + background-color: #286090; +} +.bg-success { + background-color: #dff0d8; +} +a.bg-success:hover, +a.bg-success:focus { + background-color: #c1e2b3; +} +.bg-info { + background-color: #d9edf7; +} +a.bg-info:hover, +a.bg-info:focus { + background-color: #afd9ee; +} +.bg-warning { + background-color: #fcf8e3; +} +a.bg-warning:hover, +a.bg-warning:focus { + background-color: #f7ecb5; +} +.bg-danger { + background-color: #f2dede; +} +a.bg-danger:hover, +a.bg-danger:focus { + background-color: #e4b9b9; +} +.page-header { + padding-bottom: 9px; + margin: 40px 0 20px; + border-bottom: 1px solid #eee; +} +ul, +ol { + margin-top: 0; + margin-bottom: 10px; +} +ul ul, +ol ul, +ul ol, +ol ol { + margin-bottom: 0; +} +.list-unstyled { + padding-left: 0; + 
list-style: none; +} +.list-inline { + padding-left: 0; + margin-left: -5px; + list-style: none; +} +.list-inline > li { + display: inline-block; + padding-right: 5px; + padding-left: 5px; +} +dl { + margin-top: 0; + margin-bottom: 20px; +} +dt, +dd { + line-height: 1.42857143; +} +dt { + font-weight: bold; +} +dd { + margin-left: 0; +} +@media (min-width: 768px) { + .dl-horizontal dt { + float: left; + width: 160px; + overflow: hidden; + clear: left; + text-align: right; + text-overflow: ellipsis; + white-space: nowrap; + } + .dl-horizontal dd { + margin-left: 180px; + } +} +abbr[title], +abbr[data-original-title] { + cursor: help; + border-bottom: 1px dotted #777; +} +.initialism { + font-size: 90%; + text-transform: uppercase; +} +blockquote { + padding: 10px 20px; + margin: 0 0 20px; + font-size: 17.5px; + border-left: 5px solid #eee; +} +blockquote p:last-child, +blockquote ul:last-child, +blockquote ol:last-child { + margin-bottom: 0; +} +blockquote footer, +blockquote small, +blockquote .small { + display: block; + font-size: 80%; + line-height: 1.42857143; + color: #777; +} +blockquote footer:before, +blockquote small:before, +blockquote .small:before { + content: '\2014 \00A0'; +} +.blockquote-reverse, +blockquote.pull-right { + padding-right: 15px; + padding-left: 0; + text-align: right; + border-right: 5px solid #eee; + border-left: 0; +} +.blockquote-reverse footer:before, +blockquote.pull-right footer:before, +.blockquote-reverse small:before, +blockquote.pull-right small:before, +.blockquote-reverse .small:before, +blockquote.pull-right .small:before { + content: ''; +} +.blockquote-reverse footer:after, +blockquote.pull-right footer:after, +.blockquote-reverse small:after, +blockquote.pull-right small:after, +.blockquote-reverse .small:after, +blockquote.pull-right .small:after { + content: '\00A0 \2014'; +} +address { + margin-bottom: 20px; + font-style: normal; + line-height: 1.42857143; +} +code, +kbd, +pre, +samp { + font-family: Menlo, Monaco, 
Consolas, "Courier New", monospace; +} +code { + padding: 2px 4px; + font-size: 90%; + color: #c7254e; + background-color: #f9f2f4; + border-radius: 4px; +} +kbd { + padding: 2px 4px; + font-size: 90%; + color: #fff; + background-color: #333; + border-radius: 3px; + -webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, .25); + box-shadow: inset 0 -1px 0 rgba(0, 0, 0, .25); +} +kbd kbd { + padding: 0; + font-size: 100%; + font-weight: bold; + -webkit-box-shadow: none; + box-shadow: none; +} +pre { + display: block; + padding: 9.5px; + margin: 0 0 10px; + font-size: 13px; + line-height: 1.42857143; + color: #333; + word-break: break-all; + word-wrap: break-word; + background-color: #f5f5f5; + border: 1px solid #ccc; + border-radius: 4px; +} +pre code { + padding: 0; + font-size: inherit; + color: inherit; + white-space: pre-wrap; + background-color: transparent; + border-radius: 0; +} +.pre-scrollable { + max-height: 340px; + overflow-y: scroll; +} +.container { + padding-right: 15px; + padding-left: 15px; + margin-right: auto; + margin-left: auto; +} +@media (min-width: 768px) { + .container { + width: 750px; + } +} +@media (min-width: 992px) { + .container { + width: 970px; + } +} +@media (min-width: 1200px) { + .container { + width: 1170px; + } +} +.container-fluid { + padding-right: 15px; + padding-left: 15px; + margin-right: auto; + margin-left: auto; +} +.row { + margin-right: -15px; + margin-left: -15px; +} +.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12 { + position: relative; + 
min-height: 1px; + padding-right: 15px; + padding-left: 15px; +} +.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, .col-xs-12 { + float: left; +} +.col-xs-12 { + width: 100%; +} +.col-xs-11 { + width: 91.66666667%; +} +.col-xs-10 { + width: 83.33333333%; +} +.col-xs-9 { + width: 75%; +} +.col-xs-8 { + width: 66.66666667%; +} +.col-xs-7 { + width: 58.33333333%; +} +.col-xs-6 { + width: 50%; +} +.col-xs-5 { + width: 41.66666667%; +} +.col-xs-4 { + width: 33.33333333%; +} +.col-xs-3 { + width: 25%; +} +.col-xs-2 { + width: 16.66666667%; +} +.col-xs-1 { + width: 8.33333333%; +} +.col-xs-pull-12 { + right: 100%; +} +.col-xs-pull-11 { + right: 91.66666667%; +} +.col-xs-pull-10 { + right: 83.33333333%; +} +.col-xs-pull-9 { + right: 75%; +} +.col-xs-pull-8 { + right: 66.66666667%; +} +.col-xs-pull-7 { + right: 58.33333333%; +} +.col-xs-pull-6 { + right: 50%; +} +.col-xs-pull-5 { + right: 41.66666667%; +} +.col-xs-pull-4 { + right: 33.33333333%; +} +.col-xs-pull-3 { + right: 25%; +} +.col-xs-pull-2 { + right: 16.66666667%; +} +.col-xs-pull-1 { + right: 8.33333333%; +} +.col-xs-pull-0 { + right: auto; +} +.col-xs-push-12 { + left: 100%; +} +.col-xs-push-11 { + left: 91.66666667%; +} +.col-xs-push-10 { + left: 83.33333333%; +} +.col-xs-push-9 { + left: 75%; +} +.col-xs-push-8 { + left: 66.66666667%; +} +.col-xs-push-7 { + left: 58.33333333%; +} +.col-xs-push-6 { + left: 50%; +} +.col-xs-push-5 { + left: 41.66666667%; +} +.col-xs-push-4 { + left: 33.33333333%; +} +.col-xs-push-3 { + left: 25%; +} +.col-xs-push-2 { + left: 16.66666667%; +} +.col-xs-push-1 { + left: 8.33333333%; +} +.col-xs-push-0 { + left: auto; +} +.col-xs-offset-12 { + margin-left: 100%; +} +.col-xs-offset-11 { + margin-left: 91.66666667%; +} +.col-xs-offset-10 { + margin-left: 83.33333333%; +} +.col-xs-offset-9 { + margin-left: 75%; +} +.col-xs-offset-8 { + margin-left: 66.66666667%; +} +.col-xs-offset-7 { + margin-left: 58.33333333%; 
+} +.col-xs-offset-6 { + margin-left: 50%; +} +.col-xs-offset-5 { + margin-left: 41.66666667%; +} +.col-xs-offset-4 { + margin-left: 33.33333333%; +} +.col-xs-offset-3 { + margin-left: 25%; +} +.col-xs-offset-2 { + margin-left: 16.66666667%; +} +.col-xs-offset-1 { + margin-left: 8.33333333%; +} +.col-xs-offset-0 { + margin-left: 0; +} +@media (min-width: 768px) { + .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12 { + float: left; + } + .col-sm-12 { + width: 100%; + } + .col-sm-11 { + width: 91.66666667%; + } + .col-sm-10 { + width: 83.33333333%; + } + .col-sm-9 { + width: 75%; + } + .col-sm-8 { + width: 66.66666667%; + } + .col-sm-7 { + width: 58.33333333%; + } + .col-sm-6 { + width: 50%; + } + .col-sm-5 { + width: 41.66666667%; + } + .col-sm-4 { + width: 33.33333333%; + } + .col-sm-3 { + width: 25%; + } + .col-sm-2 { + width: 16.66666667%; + } + .col-sm-1 { + width: 8.33333333%; + } + .col-sm-pull-12 { + right: 100%; + } + .col-sm-pull-11 { + right: 91.66666667%; + } + .col-sm-pull-10 { + right: 83.33333333%; + } + .col-sm-pull-9 { + right: 75%; + } + .col-sm-pull-8 { + right: 66.66666667%; + } + .col-sm-pull-7 { + right: 58.33333333%; + } + .col-sm-pull-6 { + right: 50%; + } + .col-sm-pull-5 { + right: 41.66666667%; + } + .col-sm-pull-4 { + right: 33.33333333%; + } + .col-sm-pull-3 { + right: 25%; + } + .col-sm-pull-2 { + right: 16.66666667%; + } + .col-sm-pull-1 { + right: 8.33333333%; + } + .col-sm-pull-0 { + right: auto; + } + .col-sm-push-12 { + left: 100%; + } + .col-sm-push-11 { + left: 91.66666667%; + } + .col-sm-push-10 { + left: 83.33333333%; + } + .col-sm-push-9 { + left: 75%; + } + .col-sm-push-8 { + left: 66.66666667%; + } + .col-sm-push-7 { + left: 58.33333333%; + } + .col-sm-push-6 { + left: 50%; + } + .col-sm-push-5 { + left: 41.66666667%; + } + .col-sm-push-4 { + left: 33.33333333%; + } + .col-sm-push-3 { + left: 25%; + } + .col-sm-push-2 { + left: 16.66666667%; + 
} + .col-sm-push-1 { + left: 8.33333333%; + } + .col-sm-push-0 { + left: auto; + } + .col-sm-offset-12 { + margin-left: 100%; + } + .col-sm-offset-11 { + margin-left: 91.66666667%; + } + .col-sm-offset-10 { + margin-left: 83.33333333%; + } + .col-sm-offset-9 { + margin-left: 75%; + } + .col-sm-offset-8 { + margin-left: 66.66666667%; + } + .col-sm-offset-7 { + margin-left: 58.33333333%; + } + .col-sm-offset-6 { + margin-left: 50%; + } + .col-sm-offset-5 { + margin-left: 41.66666667%; + } + .col-sm-offset-4 { + margin-left: 33.33333333%; + } + .col-sm-offset-3 { + margin-left: 25%; + } + .col-sm-offset-2 { + margin-left: 16.66666667%; + } + .col-sm-offset-1 { + margin-left: 8.33333333%; + } + .col-sm-offset-0 { + margin-left: 0; + } +} +@media (min-width: 992px) { + .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12 { + float: left; + } + .col-md-12 { + width: 100%; + } + .col-md-11 { + width: 91.66666667%; + } + .col-md-10 { + width: 83.33333333%; + } + .col-md-9 { + width: 75%; + } + .col-md-8 { + width: 66.66666667%; + } + .col-md-7 { + width: 58.33333333%; + } + .col-md-6 { + width: 50%; + } + .col-md-5 { + width: 41.66666667%; + } + .col-md-4 { + width: 33.33333333%; + } + .col-md-3 { + width: 25%; + } + .col-md-2 { + width: 16.66666667%; + } + .col-md-1 { + width: 8.33333333%; + } + .col-md-pull-12 { + right: 100%; + } + .col-md-pull-11 { + right: 91.66666667%; + } + .col-md-pull-10 { + right: 83.33333333%; + } + .col-md-pull-9 { + right: 75%; + } + .col-md-pull-8 { + right: 66.66666667%; + } + .col-md-pull-7 { + right: 58.33333333%; + } + .col-md-pull-6 { + right: 50%; + } + .col-md-pull-5 { + right: 41.66666667%; + } + .col-md-pull-4 { + right: 33.33333333%; + } + .col-md-pull-3 { + right: 25%; + } + .col-md-pull-2 { + right: 16.66666667%; + } + .col-md-pull-1 { + right: 8.33333333%; + } + .col-md-pull-0 { + right: auto; + } + .col-md-push-12 { + left: 100%; + } + 
.col-md-push-11 { + left: 91.66666667%; + } + .col-md-push-10 { + left: 83.33333333%; + } + .col-md-push-9 { + left: 75%; + } + .col-md-push-8 { + left: 66.66666667%; + } + .col-md-push-7 { + left: 58.33333333%; + } + .col-md-push-6 { + left: 50%; + } + .col-md-push-5 { + left: 41.66666667%; + } + .col-md-push-4 { + left: 33.33333333%; + } + .col-md-push-3 { + left: 25%; + } + .col-md-push-2 { + left: 16.66666667%; + } + .col-md-push-1 { + left: 8.33333333%; + } + .col-md-push-0 { + left: auto; + } + .col-md-offset-12 { + margin-left: 100%; + } + .col-md-offset-11 { + margin-left: 91.66666667%; + } + .col-md-offset-10 { + margin-left: 83.33333333%; + } + .col-md-offset-9 { + margin-left: 75%; + } + .col-md-offset-8 { + margin-left: 66.66666667%; + } + .col-md-offset-7 { + margin-left: 58.33333333%; + } + .col-md-offset-6 { + margin-left: 50%; + } + .col-md-offset-5 { + margin-left: 41.66666667%; + } + .col-md-offset-4 { + margin-left: 33.33333333%; + } + .col-md-offset-3 { + margin-left: 25%; + } + .col-md-offset-2 { + margin-left: 16.66666667%; + } + .col-md-offset-1 { + margin-left: 8.33333333%; + } + .col-md-offset-0 { + margin-left: 0; + } +} +@media (min-width: 1200px) { + .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12 { + float: left; + } + .col-lg-12 { + width: 100%; + } + .col-lg-11 { + width: 91.66666667%; + } + .col-lg-10 { + width: 83.33333333%; + } + .col-lg-9 { + width: 75%; + } + .col-lg-8 { + width: 66.66666667%; + } + .col-lg-7 { + width: 58.33333333%; + } + .col-lg-6 { + width: 50%; + } + .col-lg-5 { + width: 41.66666667%; + } + .col-lg-4 { + width: 33.33333333%; + } + .col-lg-3 { + width: 25%; + } + .col-lg-2 { + width: 16.66666667%; + } + .col-lg-1 { + width: 8.33333333%; + } + .col-lg-pull-12 { + right: 100%; + } + .col-lg-pull-11 { + right: 91.66666667%; + } + .col-lg-pull-10 { + right: 83.33333333%; + } + .col-lg-pull-9 { + right: 75%; + } + .col-lg-pull-8 { 
+ right: 66.66666667%; + } + .col-lg-pull-7 { + right: 58.33333333%; + } + .col-lg-pull-6 { + right: 50%; + } + .col-lg-pull-5 { + right: 41.66666667%; + } + .col-lg-pull-4 { + right: 33.33333333%; + } + .col-lg-pull-3 { + right: 25%; + } + .col-lg-pull-2 { + right: 16.66666667%; + } + .col-lg-pull-1 { + right: 8.33333333%; + } + .col-lg-pull-0 { + right: auto; + } + .col-lg-push-12 { + left: 100%; + } + .col-lg-push-11 { + left: 91.66666667%; + } + .col-lg-push-10 { + left: 83.33333333%; + } + .col-lg-push-9 { + left: 75%; + } + .col-lg-push-8 { + left: 66.66666667%; + } + .col-lg-push-7 { + left: 58.33333333%; + } + .col-lg-push-6 { + left: 50%; + } + .col-lg-push-5 { + left: 41.66666667%; + } + .col-lg-push-4 { + left: 33.33333333%; + } + .col-lg-push-3 { + left: 25%; + } + .col-lg-push-2 { + left: 16.66666667%; + } + .col-lg-push-1 { + left: 8.33333333%; + } + .col-lg-push-0 { + left: auto; + } + .col-lg-offset-12 { + margin-left: 100%; + } + .col-lg-offset-11 { + margin-left: 91.66666667%; + } + .col-lg-offset-10 { + margin-left: 83.33333333%; + } + .col-lg-offset-9 { + margin-left: 75%; + } + .col-lg-offset-8 { + margin-left: 66.66666667%; + } + .col-lg-offset-7 { + margin-left: 58.33333333%; + } + .col-lg-offset-6 { + margin-left: 50%; + } + .col-lg-offset-5 { + margin-left: 41.66666667%; + } + .col-lg-offset-4 { + margin-left: 33.33333333%; + } + .col-lg-offset-3 { + margin-left: 25%; + } + .col-lg-offset-2 { + margin-left: 16.66666667%; + } + .col-lg-offset-1 { + margin-left: 8.33333333%; + } + .col-lg-offset-0 { + margin-left: 0; + } +} +table { + background-color: transparent; +} +caption { + padding-top: 8px; + padding-bottom: 8px; + color: #777; + text-align: left; +} +th { + text-align: left; +} +.table { + width: 100%; + max-width: 100%; + margin-bottom: 20px; +} +.table > thead > tr > th, +.table > tbody > tr > th, +.table > tfoot > tr > th, +.table > thead > tr > td, +.table > tbody > tr > td, +.table > tfoot > tr > td { + padding: 8px; + 
line-height: 1.42857143; + vertical-align: top; + border-top: 1px solid #ddd; +} +.table > thead > tr > th { + vertical-align: bottom; + border-bottom: 2px solid #ddd; +} +.table > caption + thead > tr:first-child > th, +.table > colgroup + thead > tr:first-child > th, +.table > thead:first-child > tr:first-child > th, +.table > caption + thead > tr:first-child > td, +.table > colgroup + thead > tr:first-child > td, +.table > thead:first-child > tr:first-child > td { + border-top: 0; +} +.table > tbody + tbody { + border-top: 2px solid #ddd; +} +.table .table { + background-color: #fff; +} +.table-condensed > thead > tr > th, +.table-condensed > tbody > tr > th, +.table-condensed > tfoot > tr > th, +.table-condensed > thead > tr > td, +.table-condensed > tbody > tr > td, +.table-condensed > tfoot > tr > td { + padding: 5px; +} +.table-bordered { + border: 1px solid #ddd; +} +.table-bordered > thead > tr > th, +.table-bordered > tbody > tr > th, +.table-bordered > tfoot > tr > th, +.table-bordered > thead > tr > td, +.table-bordered > tbody > tr > td, +.table-bordered > tfoot > tr > td { + border: 1px solid #ddd; +} +.table-bordered > thead > tr > th, +.table-bordered > thead > tr > td { + border-bottom-width: 2px; +} +.table-striped > tbody > tr:nth-of-type(odd) { + background-color: #f9f9f9; +} +.table-hover > tbody > tr:hover { + background-color: #f5f5f5; +} +table col[class*="col-"] { + position: static; + display: table-column; + float: none; +} +table td[class*="col-"], +table th[class*="col-"] { + position: static; + display: table-cell; + float: none; +} +.table > thead > tr > td.active, +.table > tbody > tr > td.active, +.table > tfoot > tr > td.active, +.table > thead > tr > th.active, +.table > tbody > tr > th.active, +.table > tfoot > tr > th.active, +.table > thead > tr.active > td, +.table > tbody > tr.active > td, +.table > tfoot > tr.active > td, +.table > thead > tr.active > th, +.table > tbody > tr.active > th, +.table > tfoot > tr.active > th { + 
background-color: #f5f5f5; +} +.table-hover > tbody > tr > td.active:hover, +.table-hover > tbody > tr > th.active:hover, +.table-hover > tbody > tr.active:hover > td, +.table-hover > tbody > tr:hover > .active, +.table-hover > tbody > tr.active:hover > th { + background-color: #e8e8e8; +} +.table > thead > tr > td.success, +.table > tbody > tr > td.success, +.table > tfoot > tr > td.success, +.table > thead > tr > th.success, +.table > tbody > tr > th.success, +.table > tfoot > tr > th.success, +.table > thead > tr.success > td, +.table > tbody > tr.success > td, +.table > tfoot > tr.success > td, +.table > thead > tr.success > th, +.table > tbody > tr.success > th, +.table > tfoot > tr.success > th { + background-color: #dff0d8; +} +.table-hover > tbody > tr > td.success:hover, +.table-hover > tbody > tr > th.success:hover, +.table-hover > tbody > tr.success:hover > td, +.table-hover > tbody > tr:hover > .success, +.table-hover > tbody > tr.success:hover > th { + background-color: #d0e9c6; +} +.table > thead > tr > td.info, +.table > tbody > tr > td.info, +.table > tfoot > tr > td.info, +.table > thead > tr > th.info, +.table > tbody > tr > th.info, +.table > tfoot > tr > th.info, +.table > thead > tr.info > td, +.table > tbody > tr.info > td, +.table > tfoot > tr.info > td, +.table > thead > tr.info > th, +.table > tbody > tr.info > th, +.table > tfoot > tr.info > th { + background-color: #d9edf7; +} +.table-hover > tbody > tr > td.info:hover, +.table-hover > tbody > tr > th.info:hover, +.table-hover > tbody > tr.info:hover > td, +.table-hover > tbody > tr:hover > .info, +.table-hover > tbody > tr.info:hover > th { + background-color: #c4e3f3; +} +.table > thead > tr > td.warning, +.table > tbody > tr > td.warning, +.table > tfoot > tr > td.warning, +.table > thead > tr > th.warning, +.table > tbody > tr > th.warning, +.table > tfoot > tr > th.warning, +.table > thead > tr.warning > td, +.table > tbody > tr.warning > td, +.table > tfoot > tr.warning > td, 
+.table > thead > tr.warning > th, +.table > tbody > tr.warning > th, +.table > tfoot > tr.warning > th { + background-color: #fcf8e3; +} +.table-hover > tbody > tr > td.warning:hover, +.table-hover > tbody > tr > th.warning:hover, +.table-hover > tbody > tr.warning:hover > td, +.table-hover > tbody > tr:hover > .warning, +.table-hover > tbody > tr.warning:hover > th { + background-color: #faf2cc; +} +.table > thead > tr > td.danger, +.table > tbody > tr > td.danger, +.table > tfoot > tr > td.danger, +.table > thead > tr > th.danger, +.table > tbody > tr > th.danger, +.table > tfoot > tr > th.danger, +.table > thead > tr.danger > td, +.table > tbody > tr.danger > td, +.table > tfoot > tr.danger > td, +.table > thead > tr.danger > th, +.table > tbody > tr.danger > th, +.table > tfoot > tr.danger > th { + background-color: #f2dede; +} +.table-hover > tbody > tr > td.danger:hover, +.table-hover > tbody > tr > th.danger:hover, +.table-hover > tbody > tr.danger:hover > td, +.table-hover > tbody > tr:hover > .danger, +.table-hover > tbody > tr.danger:hover > th { + background-color: #ebcccc; +} +.table-responsive { + min-height: .01%; + overflow-x: auto; +} +@media screen and (max-width: 767px) { + .table-responsive { + width: 100%; + margin-bottom: 15px; + overflow-y: hidden; + -ms-overflow-style: -ms-autohiding-scrollbar; + border: 1px solid #ddd; + } + .table-responsive > .table { + margin-bottom: 0; + } + .table-responsive > .table > thead > tr > th, + .table-responsive > .table > tbody > tr > th, + .table-responsive > .table > tfoot > tr > th, + .table-responsive > .table > thead > tr > td, + .table-responsive > .table > tbody > tr > td, + .table-responsive > .table > tfoot > tr > td { + white-space: nowrap; + } + .table-responsive > .table-bordered { + border: 0; + } + .table-responsive > .table-bordered > thead > tr > th:first-child, + .table-responsive > .table-bordered > tbody > tr > th:first-child, + .table-responsive > .table-bordered > tfoot > tr > 
th:first-child, + .table-responsive > .table-bordered > thead > tr > td:first-child, + .table-responsive > .table-bordered > tbody > tr > td:first-child, + .table-responsive > .table-bordered > tfoot > tr > td:first-child { + border-left: 0; + } + .table-responsive > .table-bordered > thead > tr > th:last-child, + .table-responsive > .table-bordered > tbody > tr > th:last-child, + .table-responsive > .table-bordered > tfoot > tr > th:last-child, + .table-responsive > .table-bordered > thead > tr > td:last-child, + .table-responsive > .table-bordered > tbody > tr > td:last-child, + .table-responsive > .table-bordered > tfoot > tr > td:last-child { + border-right: 0; + } + .table-responsive > .table-bordered > tbody > tr:last-child > th, + .table-responsive > .table-bordered > tfoot > tr:last-child > th, + .table-responsive > .table-bordered > tbody > tr:last-child > td, + .table-responsive > .table-bordered > tfoot > tr:last-child > td { + border-bottom: 0; + } +} +fieldset { + min-width: 0; + padding: 0; + margin: 0; + border: 0; +} +legend { + display: block; + width: 100%; + padding: 0; + margin-bottom: 20px; + font-size: 21px; + line-height: inherit; + color: #333; + border: 0; + border-bottom: 1px solid #e5e5e5; +} +label { + display: inline-block; + max-width: 100%; + margin-bottom: 5px; + font-weight: bold; +} +input[type="search"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} +input[type="radio"], +input[type="checkbox"] { + margin: 4px 0 0; + margin-top: 1px \9; + line-height: normal; +} +input[type="file"] { + display: block; +} +input[type="range"] { + display: block; + width: 100%; +} +select[multiple], +select[size] { + height: auto; +} +input[type="file"]:focus, +input[type="radio"]:focus, +input[type="checkbox"]:focus { + outline: 5px auto -webkit-focus-ring-color; + outline-offset: -2px; +} +output { + display: block; + padding-top: 7px; + font-size: 14px; + line-height: 1.42857143; + color: #555; +} 
+.form-control { + display: block; + width: 100%; + height: 34px; + padding: 6px 12px; + font-size: 14px; + line-height: 1.42857143; + color: #555; + background-color: #fff; + background-image: none; + border: 1px solid #ccc; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); + -webkit-transition: border-color ease-in-out .15s, -webkit-box-shadow ease-in-out .15s; + -o-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s; + transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s; +} +.form-control:focus { + border-color: #66afe9; + outline: 0; + -webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, .6); + box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, .6); +} +.form-control::-moz-placeholder { + color: #999; + opacity: 1; +} +.form-control:-ms-input-placeholder { + color: #999; +} +.form-control::-webkit-input-placeholder { + color: #999; +} +.form-control::-ms-expand { + background-color: transparent; + border: 0; +} +.form-control[disabled], +.form-control[readonly], +fieldset[disabled] .form-control { + background-color: #eee; + opacity: 1; +} +.form-control[disabled], +fieldset[disabled] .form-control { + cursor: not-allowed; +} +textarea.form-control { + height: auto; +} +input[type="search"] { + -webkit-appearance: none; +} +@media screen and (-webkit-min-device-pixel-ratio: 0) { + input[type="date"].form-control, + input[type="time"].form-control, + input[type="datetime-local"].form-control, + input[type="month"].form-control { + line-height: 34px; + } + input[type="date"].input-sm, + input[type="time"].input-sm, + input[type="datetime-local"].input-sm, + input[type="month"].input-sm, + .input-group-sm input[type="date"], + .input-group-sm input[type="time"], + .input-group-sm input[type="datetime-local"], + .input-group-sm input[type="month"] { + line-height: 30px; + } + 
input[type="date"].input-lg, + input[type="time"].input-lg, + input[type="datetime-local"].input-lg, + input[type="month"].input-lg, + .input-group-lg input[type="date"], + .input-group-lg input[type="time"], + .input-group-lg input[type="datetime-local"], + .input-group-lg input[type="month"] { + line-height: 46px; + } +} +.form-group { + margin-bottom: 15px; +} +.radio, +.checkbox { + position: relative; + display: block; + margin-top: 10px; + margin-bottom: 10px; +} +.radio label, +.checkbox label { + min-height: 20px; + padding-left: 20px; + margin-bottom: 0; + font-weight: normal; + cursor: pointer; +} +.radio input[type="radio"], +.radio-inline input[type="radio"], +.checkbox input[type="checkbox"], +.checkbox-inline input[type="checkbox"] { + position: absolute; + margin-top: 4px \9; + margin-left: -20px; +} +.radio + .radio, +.checkbox + .checkbox { + margin-top: -5px; +} +.radio-inline, +.checkbox-inline { + position: relative; + display: inline-block; + padding-left: 20px; + margin-bottom: 0; + font-weight: normal; + vertical-align: middle; + cursor: pointer; +} +.radio-inline + .radio-inline, +.checkbox-inline + .checkbox-inline { + margin-top: 0; + margin-left: 10px; +} +input[type="radio"][disabled], +input[type="checkbox"][disabled], +input[type="radio"].disabled, +input[type="checkbox"].disabled, +fieldset[disabled] input[type="radio"], +fieldset[disabled] input[type="checkbox"] { + cursor: not-allowed; +} +.radio-inline.disabled, +.checkbox-inline.disabled, +fieldset[disabled] .radio-inline, +fieldset[disabled] .checkbox-inline { + cursor: not-allowed; +} +.radio.disabled label, +.checkbox.disabled label, +fieldset[disabled] .radio label, +fieldset[disabled] .checkbox label { + cursor: not-allowed; +} +.form-control-static { + min-height: 34px; + padding-top: 7px; + padding-bottom: 7px; + margin-bottom: 0; +} +.form-control-static.input-lg, +.form-control-static.input-sm { + padding-right: 0; + padding-left: 0; +} +.input-sm { + height: 30px; + 
padding: 5px 10px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; +} +select.input-sm { + height: 30px; + line-height: 30px; +} +textarea.input-sm, +select[multiple].input-sm { + height: auto; +} +.form-group-sm .form-control { + height: 30px; + padding: 5px 10px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; +} +.form-group-sm select.form-control { + height: 30px; + line-height: 30px; +} +.form-group-sm textarea.form-control, +.form-group-sm select[multiple].form-control { + height: auto; +} +.form-group-sm .form-control-static { + height: 30px; + min-height: 32px; + padding: 6px 10px; + font-size: 12px; + line-height: 1.5; +} +.input-lg { + height: 46px; + padding: 10px 16px; + font-size: 18px; + line-height: 1.3333333; + border-radius: 6px; +} +select.input-lg { + height: 46px; + line-height: 46px; +} +textarea.input-lg, +select[multiple].input-lg { + height: auto; +} +.form-group-lg .form-control { + height: 46px; + padding: 10px 16px; + font-size: 18px; + line-height: 1.3333333; + border-radius: 6px; +} +.form-group-lg select.form-control { + height: 46px; + line-height: 46px; +} +.form-group-lg textarea.form-control, +.form-group-lg select[multiple].form-control { + height: auto; +} +.form-group-lg .form-control-static { + height: 46px; + min-height: 38px; + padding: 11px 16px; + font-size: 18px; + line-height: 1.3333333; +} +.has-feedback { + position: relative; +} +.has-feedback .form-control { + padding-right: 42.5px; +} +.form-control-feedback { + position: absolute; + top: 0; + right: 0; + z-index: 2; + display: block; + width: 34px; + height: 34px; + line-height: 34px; + text-align: center; + pointer-events: none; +} +.input-lg + .form-control-feedback, +.input-group-lg + .form-control-feedback, +.form-group-lg .form-control + .form-control-feedback { + width: 46px; + height: 46px; + line-height: 46px; +} +.input-sm + .form-control-feedback, +.input-group-sm + .form-control-feedback, +.form-group-sm .form-control + 
.form-control-feedback { + width: 30px; + height: 30px; + line-height: 30px; +} +.has-success .help-block, +.has-success .control-label, +.has-success .radio, +.has-success .checkbox, +.has-success .radio-inline, +.has-success .checkbox-inline, +.has-success.radio label, +.has-success.checkbox label, +.has-success.radio-inline label, +.has-success.checkbox-inline label { + color: #3c763d; +} +.has-success .form-control { + border-color: #3c763d; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); +} +.has-success .form-control:focus { + border-color: #2b542c; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #67b168; + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #67b168; +} +.has-success .input-group-addon { + color: #3c763d; + background-color: #dff0d8; + border-color: #3c763d; +} +.has-success .form-control-feedback { + color: #3c763d; +} +.has-warning .help-block, +.has-warning .control-label, +.has-warning .radio, +.has-warning .checkbox, +.has-warning .radio-inline, +.has-warning .checkbox-inline, +.has-warning.radio label, +.has-warning.checkbox label, +.has-warning.radio-inline label, +.has-warning.checkbox-inline label { + color: #8a6d3b; +} +.has-warning .form-control { + border-color: #8a6d3b; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); +} +.has-warning .form-control:focus { + border-color: #66512c; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #c0a16b; + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #c0a16b; +} +.has-warning .input-group-addon { + color: #8a6d3b; + background-color: #fcf8e3; + border-color: #8a6d3b; +} +.has-warning .form-control-feedback { + color: #8a6d3b; +} +.has-error .help-block, +.has-error .control-label, +.has-error .radio, +.has-error .checkbox, +.has-error .radio-inline, +.has-error .checkbox-inline, +.has-error.radio label, 
+.has-error.checkbox label, +.has-error.radio-inline label, +.has-error.checkbox-inline label { + color: #a94442; +} +.has-error .form-control { + border-color: #a94442; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075); +} +.has-error .form-control:focus { + border-color: #843534; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #ce8483; + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 6px #ce8483; +} +.has-error .input-group-addon { + color: #a94442; + background-color: #f2dede; + border-color: #a94442; +} +.has-error .form-control-feedback { + color: #a94442; +} +.has-feedback label ~ .form-control-feedback { + top: 25px; +} +.has-feedback label.sr-only ~ .form-control-feedback { + top: 0; +} +.help-block { + display: block; + margin-top: 5px; + margin-bottom: 10px; + color: #737373; +} +@media (min-width: 768px) { + .form-inline .form-group { + display: inline-block; + margin-bottom: 0; + vertical-align: middle; + } + .form-inline .form-control { + display: inline-block; + width: auto; + vertical-align: middle; + } + .form-inline .form-control-static { + display: inline-block; + } + .form-inline .input-group { + display: inline-table; + vertical-align: middle; + } + .form-inline .input-group .input-group-addon, + .form-inline .input-group .input-group-btn, + .form-inline .input-group .form-control { + width: auto; + } + .form-inline .input-group > .form-control { + width: 100%; + } + .form-inline .control-label { + margin-bottom: 0; + vertical-align: middle; + } + .form-inline .radio, + .form-inline .checkbox { + display: inline-block; + margin-top: 0; + margin-bottom: 0; + vertical-align: middle; + } + .form-inline .radio label, + .form-inline .checkbox label { + padding-left: 0; + } + .form-inline .radio input[type="radio"], + .form-inline .checkbox input[type="checkbox"] { + position: relative; + margin-left: 0; + } + .form-inline .has-feedback .form-control-feedback { + top: 
0; + } +} +.form-horizontal .radio, +.form-horizontal .checkbox, +.form-horizontal .radio-inline, +.form-horizontal .checkbox-inline { + padding-top: 7px; + margin-top: 0; + margin-bottom: 0; +} +.form-horizontal .radio, +.form-horizontal .checkbox { + min-height: 27px; +} +.form-horizontal .form-group { + margin-right: -15px; + margin-left: -15px; +} +@media (min-width: 768px) { + .form-horizontal .control-label { + padding-top: 7px; + margin-bottom: 0; + text-align: right; + } +} +.form-horizontal .has-feedback .form-control-feedback { + right: 15px; +} +@media (min-width: 768px) { + .form-horizontal .form-group-lg .control-label { + padding-top: 11px; + font-size: 18px; + } +} +@media (min-width: 768px) { + .form-horizontal .form-group-sm .control-label { + padding-top: 6px; + font-size: 12px; + } +} +.btn { + display: inline-block; + padding: 6px 12px; + margin-bottom: 0; + font-size: 14px; + font-weight: normal; + line-height: 1.42857143; + text-align: center; + white-space: nowrap; + vertical-align: middle; + -ms-touch-action: manipulation; + touch-action: manipulation; + cursor: pointer; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + background-image: none; + border: 1px solid transparent; + border-radius: 4px; +} +.btn:focus, +.btn:active:focus, +.btn.active:focus, +.btn.focus, +.btn:active.focus, +.btn.active.focus { + outline: 5px auto -webkit-focus-ring-color; + outline-offset: -2px; +} +.btn:hover, +.btn:focus, +.btn.focus { + color: #333; + text-decoration: none; +} +.btn:active, +.btn.active { + background-image: none; + outline: 0; + -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125); + box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125); +} +.btn.disabled, +.btn[disabled], +fieldset[disabled] .btn { + cursor: not-allowed; + filter: alpha(opacity=65); + -webkit-box-shadow: none; + box-shadow: none; + opacity: .65; +} +a.btn.disabled, +fieldset[disabled] a.btn { + pointer-events: none; +} 
+.btn-default { + color: #333; + background-color: #fff; + border-color: #ccc; +} +.btn-default:focus, +.btn-default.focus { + color: #333; + background-color: #e6e6e6; + border-color: #8c8c8c; +} +.btn-default:hover { + color: #333; + background-color: #e6e6e6; + border-color: #adadad; +} +.btn-default:active, +.btn-default.active, +.open > .dropdown-toggle.btn-default { + color: #333; + background-color: #e6e6e6; + border-color: #adadad; +} +.btn-default:active:hover, +.btn-default.active:hover, +.open > .dropdown-toggle.btn-default:hover, +.btn-default:active:focus, +.btn-default.active:focus, +.open > .dropdown-toggle.btn-default:focus, +.btn-default:active.focus, +.btn-default.active.focus, +.open > .dropdown-toggle.btn-default.focus { + color: #333; + background-color: #d4d4d4; + border-color: #8c8c8c; +} +.btn-default:active, +.btn-default.active, +.open > .dropdown-toggle.btn-default { + background-image: none; +} +.btn-default.disabled:hover, +.btn-default[disabled]:hover, +fieldset[disabled] .btn-default:hover, +.btn-default.disabled:focus, +.btn-default[disabled]:focus, +fieldset[disabled] .btn-default:focus, +.btn-default.disabled.focus, +.btn-default[disabled].focus, +fieldset[disabled] .btn-default.focus { + background-color: #fff; + border-color: #ccc; +} +.btn-default .badge { + color: #fff; + background-color: #333; +} +.btn-primary { + color: #fff; + background-color: #337ab7; + border-color: #2e6da4; +} +.btn-primary:focus, +.btn-primary.focus { + color: #fff; + background-color: #286090; + border-color: #122b40; +} +.btn-primary:hover { + color: #fff; + background-color: #286090; + border-color: #204d74; +} +.btn-primary:active, +.btn-primary.active, +.open > .dropdown-toggle.btn-primary { + color: #fff; + background-color: #286090; + border-color: #204d74; +} +.btn-primary:active:hover, +.btn-primary.active:hover, +.open > .dropdown-toggle.btn-primary:hover, +.btn-primary:active:focus, +.btn-primary.active:focus, +.open > 
.dropdown-toggle.btn-primary:focus, +.btn-primary:active.focus, +.btn-primary.active.focus, +.open > .dropdown-toggle.btn-primary.focus { + color: #fff; + background-color: #204d74; + border-color: #122b40; +} +.btn-primary:active, +.btn-primary.active, +.open > .dropdown-toggle.btn-primary { + background-image: none; +} +.btn-primary.disabled:hover, +.btn-primary[disabled]:hover, +fieldset[disabled] .btn-primary:hover, +.btn-primary.disabled:focus, +.btn-primary[disabled]:focus, +fieldset[disabled] .btn-primary:focus, +.btn-primary.disabled.focus, +.btn-primary[disabled].focus, +fieldset[disabled] .btn-primary.focus { + background-color: #337ab7; + border-color: #2e6da4; +} +.btn-primary .badge { + color: #337ab7; + background-color: #fff; +} +.btn-success { + color: #fff; + background-color: #5cb85c; + border-color: #4cae4c; +} +.btn-success:focus, +.btn-success.focus { + color: #fff; + background-color: #449d44; + border-color: #255625; +} +.btn-success:hover { + color: #fff; + background-color: #449d44; + border-color: #398439; +} +.btn-success:active, +.btn-success.active, +.open > .dropdown-toggle.btn-success { + color: #fff; + background-color: #449d44; + border-color: #398439; +} +.btn-success:active:hover, +.btn-success.active:hover, +.open > .dropdown-toggle.btn-success:hover, +.btn-success:active:focus, +.btn-success.active:focus, +.open > .dropdown-toggle.btn-success:focus, +.btn-success:active.focus, +.btn-success.active.focus, +.open > .dropdown-toggle.btn-success.focus { + color: #fff; + background-color: #398439; + border-color: #255625; +} +.btn-success:active, +.btn-success.active, +.open > .dropdown-toggle.btn-success { + background-image: none; +} +.btn-success.disabled:hover, +.btn-success[disabled]:hover, +fieldset[disabled] .btn-success:hover, +.btn-success.disabled:focus, +.btn-success[disabled]:focus, +fieldset[disabled] .btn-success:focus, +.btn-success.disabled.focus, +.btn-success[disabled].focus, +fieldset[disabled] .btn-success.focus { 
+ background-color: #5cb85c; + border-color: #4cae4c; +} +.btn-success .badge { + color: #5cb85c; + background-color: #fff; +} +.btn-info { + color: #fff; + background-color: #5bc0de; + border-color: #46b8da; +} +.btn-info:focus, +.btn-info.focus { + color: #fff; + background-color: #31b0d5; + border-color: #1b6d85; +} +.btn-info:hover { + color: #fff; + background-color: #31b0d5; + border-color: #269abc; +} +.btn-info:active, +.btn-info.active, +.open > .dropdown-toggle.btn-info { + color: #fff; + background-color: #31b0d5; + border-color: #269abc; +} +.btn-info:active:hover, +.btn-info.active:hover, +.open > .dropdown-toggle.btn-info:hover, +.btn-info:active:focus, +.btn-info.active:focus, +.open > .dropdown-toggle.btn-info:focus, +.btn-info:active.focus, +.btn-info.active.focus, +.open > .dropdown-toggle.btn-info.focus { + color: #fff; + background-color: #269abc; + border-color: #1b6d85; +} +.btn-info:active, +.btn-info.active, +.open > .dropdown-toggle.btn-info { + background-image: none; +} +.btn-info.disabled:hover, +.btn-info[disabled]:hover, +fieldset[disabled] .btn-info:hover, +.btn-info.disabled:focus, +.btn-info[disabled]:focus, +fieldset[disabled] .btn-info:focus, +.btn-info.disabled.focus, +.btn-info[disabled].focus, +fieldset[disabled] .btn-info.focus { + background-color: #5bc0de; + border-color: #46b8da; +} +.btn-info .badge { + color: #5bc0de; + background-color: #fff; +} +.btn-warning { + color: #fff; + background-color: #f0ad4e; + border-color: #eea236; +} +.btn-warning:focus, +.btn-warning.focus { + color: #fff; + background-color: #ec971f; + border-color: #985f0d; +} +.btn-warning:hover { + color: #fff; + background-color: #ec971f; + border-color: #d58512; +} +.btn-warning:active, +.btn-warning.active, +.open > .dropdown-toggle.btn-warning { + color: #fff; + background-color: #ec971f; + border-color: #d58512; +} +.btn-warning:active:hover, +.btn-warning.active:hover, +.open > .dropdown-toggle.btn-warning:hover, +.btn-warning:active:focus, 
+.btn-warning.active:focus, +.open > .dropdown-toggle.btn-warning:focus, +.btn-warning:active.focus, +.btn-warning.active.focus, +.open > .dropdown-toggle.btn-warning.focus { + color: #fff; + background-color: #d58512; + border-color: #985f0d; +} +.btn-warning:active, +.btn-warning.active, +.open > .dropdown-toggle.btn-warning { + background-image: none; +} +.btn-warning.disabled:hover, +.btn-warning[disabled]:hover, +fieldset[disabled] .btn-warning:hover, +.btn-warning.disabled:focus, +.btn-warning[disabled]:focus, +fieldset[disabled] .btn-warning:focus, +.btn-warning.disabled.focus, +.btn-warning[disabled].focus, +fieldset[disabled] .btn-warning.focus { + background-color: #f0ad4e; + border-color: #eea236; +} +.btn-warning .badge { + color: #f0ad4e; + background-color: #fff; +} +.btn-danger { + color: #fff; + background-color: #d9534f; + border-color: #d43f3a; +} +.btn-danger:focus, +.btn-danger.focus { + color: #fff; + background-color: #c9302c; + border-color: #761c19; +} +.btn-danger:hover { + color: #fff; + background-color: #c9302c; + border-color: #ac2925; +} +.btn-danger:active, +.btn-danger.active, +.open > .dropdown-toggle.btn-danger { + color: #fff; + background-color: #c9302c; + border-color: #ac2925; +} +.btn-danger:active:hover, +.btn-danger.active:hover, +.open > .dropdown-toggle.btn-danger:hover, +.btn-danger:active:focus, +.btn-danger.active:focus, +.open > .dropdown-toggle.btn-danger:focus, +.btn-danger:active.focus, +.btn-danger.active.focus, +.open > .dropdown-toggle.btn-danger.focus { + color: #fff; + background-color: #ac2925; + border-color: #761c19; +} +.btn-danger:active, +.btn-danger.active, +.open > .dropdown-toggle.btn-danger { + background-image: none; +} +.btn-danger.disabled:hover, +.btn-danger[disabled]:hover, +fieldset[disabled] .btn-danger:hover, +.btn-danger.disabled:focus, +.btn-danger[disabled]:focus, +fieldset[disabled] .btn-danger:focus, +.btn-danger.disabled.focus, +.btn-danger[disabled].focus, +fieldset[disabled] 
.btn-danger.focus { + background-color: #d9534f; + border-color: #d43f3a; +} +.btn-danger .badge { + color: #d9534f; + background-color: #fff; +} +.btn-link { + font-weight: normal; + color: #337ab7; + border-radius: 0; +} +.btn-link, +.btn-link:active, +.btn-link.active, +.btn-link[disabled], +fieldset[disabled] .btn-link { + background-color: transparent; + -webkit-box-shadow: none; + box-shadow: none; +} +.btn-link, +.btn-link:hover, +.btn-link:focus, +.btn-link:active { + border-color: transparent; +} +.btn-link:hover, +.btn-link:focus { + color: #23527c; + text-decoration: underline; + background-color: transparent; +} +.btn-link[disabled]:hover, +fieldset[disabled] .btn-link:hover, +.btn-link[disabled]:focus, +fieldset[disabled] .btn-link:focus { + color: #777; + text-decoration: none; +} +.btn-lg, +.btn-group-lg > .btn { + padding: 10px 16px; + font-size: 18px; + line-height: 1.3333333; + border-radius: 6px; +} +.btn-sm, +.btn-group-sm > .btn { + padding: 5px 10px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; +} +.btn-xs, +.btn-group-xs > .btn { + padding: 1px 5px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; +} +.btn-block { + display: block; + width: 100%; +} +.btn-block + .btn-block { + margin-top: 5px; +} +input[type="submit"].btn-block, +input[type="reset"].btn-block, +input[type="button"].btn-block { + width: 100%; +} +.fade { + opacity: 0; + -webkit-transition: opacity .15s linear; + -o-transition: opacity .15s linear; + transition: opacity .15s linear; +} +.fade.in { + opacity: 1; +} +.collapse { + display: none; +} +.collapse.in { + display: block; +} +tr.collapse.in { + display: table-row; +} +tbody.collapse.in { + display: table-row-group; +} +.collapsing { + position: relative; + height: 0; + overflow: hidden; + -webkit-transition-timing-function: ease; + -o-transition-timing-function: ease; + transition-timing-function: ease; + -webkit-transition-duration: .35s; + -o-transition-duration: .35s; + 
transition-duration: .35s; + -webkit-transition-property: height, visibility; + -o-transition-property: height, visibility; + transition-property: height, visibility; +} +.caret { + display: inline-block; + width: 0; + height: 0; + margin-left: 2px; + vertical-align: middle; + border-top: 4px dashed; + border-top: 4px solid \9; + border-right: 4px solid transparent; + border-left: 4px solid transparent; +} +.dropup, +.dropdown { + position: relative; +} +.dropdown-toggle:focus { + outline: 0; +} +.dropdown-menu { + position: absolute; + top: 100%; + left: 0; + z-index: 1000; + display: none; + float: left; + min-width: 160px; + padding: 5px 0; + margin: 2px 0 0; + font-size: 14px; + text-align: left; + list-style: none; + background-color: #fff; + -webkit-background-clip: padding-box; + background-clip: padding-box; + border: 1px solid #ccc; + border: 1px solid rgba(0, 0, 0, .15); + border-radius: 4px; + -webkit-box-shadow: 0 6px 12px rgba(0, 0, 0, .175); + box-shadow: 0 6px 12px rgba(0, 0, 0, .175); +} +.dropdown-menu.pull-right { + right: 0; + left: auto; +} +.dropdown-menu .divider { + height: 1px; + margin: 9px 0; + overflow: hidden; + background-color: #e5e5e5; +} +.dropdown-menu > li > a { + display: block; + padding: 3px 20px; + clear: both; + font-weight: normal; + line-height: 1.42857143; + color: #333; + white-space: nowrap; +} +.dropdown-menu > li > a:hover, +.dropdown-menu > li > a:focus { + color: #262626; + text-decoration: none; + background-color: #f5f5f5; +} +.dropdown-menu > .active > a, +.dropdown-menu > .active > a:hover, +.dropdown-menu > .active > a:focus { + color: #fff; + text-decoration: none; + background-color: #337ab7; + outline: 0; +} +.dropdown-menu > .disabled > a, +.dropdown-menu > .disabled > a:hover, +.dropdown-menu > .disabled > a:focus { + color: #777; +} +.dropdown-menu > .disabled > a:hover, +.dropdown-menu > .disabled > a:focus { + text-decoration: none; + cursor: not-allowed; + background-color: transparent; + 
background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); +} +.open > .dropdown-menu { + display: block; +} +.open > a { + outline: 0; +} +.dropdown-menu-right { + right: 0; + left: auto; +} +.dropdown-menu-left { + right: auto; + left: 0; +} +.dropdown-header { + display: block; + padding: 3px 20px; + font-size: 12px; + line-height: 1.42857143; + color: #777; + white-space: nowrap; +} +.dropdown-backdrop { + position: fixed; + top: 0; + right: 0; + bottom: 0; + left: 0; + z-index: 990; +} +.pull-right > .dropdown-menu { + right: 0; + left: auto; +} +.dropup .caret, +.navbar-fixed-bottom .dropdown .caret { + content: ""; + border-top: 0; + border-bottom: 4px dashed; + border-bottom: 4px solid \9; +} +.dropup .dropdown-menu, +.navbar-fixed-bottom .dropdown .dropdown-menu { + top: auto; + bottom: 100%; + margin-bottom: 2px; +} +@media (min-width: 768px) { + .navbar-right .dropdown-menu { + right: 0; + left: auto; + } + .navbar-right .dropdown-menu-left { + right: auto; + left: 0; + } +} +.btn-group, +.btn-group-vertical { + position: relative; + display: inline-block; + vertical-align: middle; +} +.btn-group > .btn, +.btn-group-vertical > .btn { + position: relative; + float: left; +} +.btn-group > .btn:hover, +.btn-group-vertical > .btn:hover, +.btn-group > .btn:focus, +.btn-group-vertical > .btn:focus, +.btn-group > .btn:active, +.btn-group-vertical > .btn:active, +.btn-group > .btn.active, +.btn-group-vertical > .btn.active { + z-index: 2; +} +.btn-group .btn + .btn, +.btn-group .btn + .btn-group, +.btn-group .btn-group + .btn, +.btn-group .btn-group + .btn-group { + margin-left: -1px; +} +.btn-toolbar { + margin-left: -5px; +} +.btn-toolbar .btn, +.btn-toolbar .btn-group, +.btn-toolbar .input-group { + float: left; +} +.btn-toolbar > .btn, +.btn-toolbar > .btn-group, +.btn-toolbar > .input-group { + margin-left: 5px; +} +.btn-group > .btn:not(:first-child):not(:last-child):not(.dropdown-toggle) { + border-radius: 0; +} 
+.btn-group > .btn:first-child { + margin-left: 0; +} +.btn-group > .btn:first-child:not(:last-child):not(.dropdown-toggle) { + border-top-right-radius: 0; + border-bottom-right-radius: 0; +} +.btn-group > .btn:last-child:not(:first-child), +.btn-group > .dropdown-toggle:not(:first-child) { + border-top-left-radius: 0; + border-bottom-left-radius: 0; +} +.btn-group > .btn-group { + float: left; +} +.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn { + border-radius: 0; +} +.btn-group > .btn-group:first-child:not(:last-child) > .btn:last-child, +.btn-group > .btn-group:first-child:not(:last-child) > .dropdown-toggle { + border-top-right-radius: 0; + border-bottom-right-radius: 0; +} +.btn-group > .btn-group:last-child:not(:first-child) > .btn:first-child { + border-top-left-radius: 0; + border-bottom-left-radius: 0; +} +.btn-group .dropdown-toggle:active, +.btn-group.open .dropdown-toggle { + outline: 0; +} +.btn-group > .btn + .dropdown-toggle { + padding-right: 8px; + padding-left: 8px; +} +.btn-group > .btn-lg + .dropdown-toggle { + padding-right: 12px; + padding-left: 12px; +} +.btn-group.open .dropdown-toggle { + -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125); + box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125); +} +.btn-group.open .dropdown-toggle.btn-link { + -webkit-box-shadow: none; + box-shadow: none; +} +.btn .caret { + margin-left: 0; +} +.btn-lg .caret { + border-width: 5px 5px 0; + border-bottom-width: 0; +} +.dropup .btn-lg .caret { + border-width: 0 5px 5px; +} +.btn-group-vertical > .btn, +.btn-group-vertical > .btn-group, +.btn-group-vertical > .btn-group > .btn { + display: block; + float: none; + width: 100%; + max-width: 100%; +} +.btn-group-vertical > .btn-group > .btn { + float: none; +} +.btn-group-vertical > .btn + .btn, +.btn-group-vertical > .btn + .btn-group, +.btn-group-vertical > .btn-group + .btn, +.btn-group-vertical > .btn-group + .btn-group { + margin-top: -1px; + margin-left: 0; +} +.btn-group-vertical > 
.btn:not(:first-child):not(:last-child) { + border-radius: 0; +} +.btn-group-vertical > .btn:first-child:not(:last-child) { + border-top-left-radius: 4px; + border-top-right-radius: 4px; + border-bottom-right-radius: 0; + border-bottom-left-radius: 0; +} +.btn-group-vertical > .btn:last-child:not(:first-child) { + border-top-left-radius: 0; + border-top-right-radius: 0; + border-bottom-right-radius: 4px; + border-bottom-left-radius: 4px; +} +.btn-group-vertical > .btn-group:not(:first-child):not(:last-child) > .btn { + border-radius: 0; +} +.btn-group-vertical > .btn-group:first-child:not(:last-child) > .btn:last-child, +.btn-group-vertical > .btn-group:first-child:not(:last-child) > .dropdown-toggle { + border-bottom-right-radius: 0; + border-bottom-left-radius: 0; +} +.btn-group-vertical > .btn-group:last-child:not(:first-child) > .btn:first-child { + border-top-left-radius: 0; + border-top-right-radius: 0; +} +.btn-group-justified { + display: table; + width: 100%; + table-layout: fixed; + border-collapse: separate; +} +.btn-group-justified > .btn, +.btn-group-justified > .btn-group { + display: table-cell; + float: none; + width: 1%; +} +.btn-group-justified > .btn-group .btn { + width: 100%; +} +.btn-group-justified > .btn-group .dropdown-menu { + left: auto; +} +[data-toggle="buttons"] > .btn input[type="radio"], +[data-toggle="buttons"] > .btn-group > .btn input[type="radio"], +[data-toggle="buttons"] > .btn input[type="checkbox"], +[data-toggle="buttons"] > .btn-group > .btn input[type="checkbox"] { + position: absolute; + clip: rect(0, 0, 0, 0); + pointer-events: none; +} +.input-group { + position: relative; + display: table; + border-collapse: separate; +} +.input-group[class*="col-"] { + float: none; + padding-right: 0; + padding-left: 0; +} +.input-group .form-control { + position: relative; + z-index: 2; + float: left; + width: 100%; + margin-bottom: 0; +} +.input-group .form-control:focus { + z-index: 3; +} +.input-group-lg > .form-control, 
+.input-group-lg > .input-group-addon, +.input-group-lg > .input-group-btn > .btn { + height: 46px; + padding: 10px 16px; + font-size: 18px; + line-height: 1.3333333; + border-radius: 6px; +} +select.input-group-lg > .form-control, +select.input-group-lg > .input-group-addon, +select.input-group-lg > .input-group-btn > .btn { + height: 46px; + line-height: 46px; +} +textarea.input-group-lg > .form-control, +textarea.input-group-lg > .input-group-addon, +textarea.input-group-lg > .input-group-btn > .btn, +select[multiple].input-group-lg > .form-control, +select[multiple].input-group-lg > .input-group-addon, +select[multiple].input-group-lg > .input-group-btn > .btn { + height: auto; +} +.input-group-sm > .form-control, +.input-group-sm > .input-group-addon, +.input-group-sm > .input-group-btn > .btn { + height: 30px; + padding: 5px 10px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; +} +select.input-group-sm > .form-control, +select.input-group-sm > .input-group-addon, +select.input-group-sm > .input-group-btn > .btn { + height: 30px; + line-height: 30px; +} +textarea.input-group-sm > .form-control, +textarea.input-group-sm > .input-group-addon, +textarea.input-group-sm > .input-group-btn > .btn, +select[multiple].input-group-sm > .form-control, +select[multiple].input-group-sm > .input-group-addon, +select[multiple].input-group-sm > .input-group-btn > .btn { + height: auto; +} +.input-group-addon, +.input-group-btn, +.input-group .form-control { + display: table-cell; +} +.input-group-addon:not(:first-child):not(:last-child), +.input-group-btn:not(:first-child):not(:last-child), +.input-group .form-control:not(:first-child):not(:last-child) { + border-radius: 0; +} +.input-group-addon, +.input-group-btn { + width: 1%; + white-space: nowrap; + vertical-align: middle; +} +.input-group-addon { + padding: 6px 12px; + font-size: 14px; + font-weight: normal; + line-height: 1; + color: #555; + text-align: center; + background-color: #eee; + border: 1px 
solid #ccc; + border-radius: 4px; +} +.input-group-addon.input-sm { + padding: 5px 10px; + font-size: 12px; + border-radius: 3px; +} +.input-group-addon.input-lg { + padding: 10px 16px; + font-size: 18px; + border-radius: 6px; +} +.input-group-addon input[type="radio"], +.input-group-addon input[type="checkbox"] { + margin-top: 0; +} +.input-group .form-control:first-child, +.input-group-addon:first-child, +.input-group-btn:first-child > .btn, +.input-group-btn:first-child > .btn-group > .btn, +.input-group-btn:first-child > .dropdown-toggle, +.input-group-btn:last-child > .btn:not(:last-child):not(.dropdown-toggle), +.input-group-btn:last-child > .btn-group:not(:last-child) > .btn { + border-top-right-radius: 0; + border-bottom-right-radius: 0; +} +.input-group-addon:first-child { + border-right: 0; +} +.input-group .form-control:last-child, +.input-group-addon:last-child, +.input-group-btn:last-child > .btn, +.input-group-btn:last-child > .btn-group > .btn, +.input-group-btn:last-child > .dropdown-toggle, +.input-group-btn:first-child > .btn:not(:first-child), +.input-group-btn:first-child > .btn-group:not(:first-child) > .btn { + border-top-left-radius: 0; + border-bottom-left-radius: 0; +} +.input-group-addon:last-child { + border-left: 0; +} +.input-group-btn { + position: relative; + font-size: 0; + white-space: nowrap; +} +.input-group-btn > .btn { + position: relative; +} +.input-group-btn > .btn + .btn { + margin-left: -1px; +} +.input-group-btn > .btn:hover, +.input-group-btn > .btn:focus, +.input-group-btn > .btn:active { + z-index: 2; +} +.input-group-btn:first-child > .btn, +.input-group-btn:first-child > .btn-group { + margin-right: -1px; +} +.input-group-btn:last-child > .btn, +.input-group-btn:last-child > .btn-group { + z-index: 2; + margin-left: -1px; +} +.nav { + padding-left: 0; + margin-bottom: 0; + list-style: none; +} +.nav > li { + position: relative; + display: block; +} +.nav > li > a { + position: relative; + display: block; + padding: 
10px 15px; +} +.nav > li > a:hover, +.nav > li > a:focus { + text-decoration: none; + background-color: #eee; +} +.nav > li.disabled > a { + color: #777; +} +.nav > li.disabled > a:hover, +.nav > li.disabled > a:focus { + color: #777; + text-decoration: none; + cursor: not-allowed; + background-color: transparent; +} +.nav .open > a, +.nav .open > a:hover, +.nav .open > a:focus { + background-color: #eee; + border-color: #337ab7; +} +.nav .nav-divider { + height: 1px; + margin: 9px 0; + overflow: hidden; + background-color: #e5e5e5; +} +.nav > li > a > img { + max-width: none; +} +.nav-tabs { + border-bottom: 1px solid #ddd; +} +.nav-tabs > li { + float: left; + margin-bottom: -1px; +} +.nav-tabs > li > a { + margin-right: 2px; + line-height: 1.42857143; + border: 1px solid transparent; + border-radius: 4px 4px 0 0; +} +.nav-tabs > li > a:hover { + border-color: #eee #eee #ddd; +} +.nav-tabs > li.active > a, +.nav-tabs > li.active > a:hover, +.nav-tabs > li.active > a:focus { + color: #555; + cursor: default; + background-color: #fff; + border: 1px solid #ddd; + border-bottom-color: transparent; +} +.nav-tabs.nav-justified { + width: 100%; + border-bottom: 0; +} +.nav-tabs.nav-justified > li { + float: none; +} +.nav-tabs.nav-justified > li > a { + margin-bottom: 5px; + text-align: center; +} +.nav-tabs.nav-justified > .dropdown .dropdown-menu { + top: auto; + left: auto; +} +@media (min-width: 768px) { + .nav-tabs.nav-justified > li { + display: table-cell; + width: 1%; + } + .nav-tabs.nav-justified > li > a { + margin-bottom: 0; + } +} +.nav-tabs.nav-justified > li > a { + margin-right: 0; + border-radius: 4px; +} +.nav-tabs.nav-justified > .active > a, +.nav-tabs.nav-justified > .active > a:hover, +.nav-tabs.nav-justified > .active > a:focus { + border: 1px solid #ddd; +} +@media (min-width: 768px) { + .nav-tabs.nav-justified > li > a { + border-bottom: 1px solid #ddd; + border-radius: 4px 4px 0 0; + } + .nav-tabs.nav-justified > .active > a, + 
.nav-tabs.nav-justified > .active > a:hover, + .nav-tabs.nav-justified > .active > a:focus { + border-bottom-color: #fff; + } +} +.nav-pills > li { + float: left; +} +.nav-pills > li > a { + border-radius: 4px; +} +.nav-pills > li + li { + margin-left: 2px; +} +.nav-pills > li.active > a, +.nav-pills > li.active > a:hover, +.nav-pills > li.active > a:focus { + color: #fff; + background-color: #337ab7; +} +.nav-stacked > li { + float: none; +} +.nav-stacked > li + li { + margin-top: 2px; + margin-left: 0; +} +.nav-justified { + width: 100%; +} +.nav-justified > li { + float: none; +} +.nav-justified > li > a { + margin-bottom: 5px; + text-align: center; +} +.nav-justified > .dropdown .dropdown-menu { + top: auto; + left: auto; +} +@media (min-width: 768px) { + .nav-justified > li { + display: table-cell; + width: 1%; + } + .nav-justified > li > a { + margin-bottom: 0; + } +} +.nav-tabs-justified { + border-bottom: 0; +} +.nav-tabs-justified > li > a { + margin-right: 0; + border-radius: 4px; +} +.nav-tabs-justified > .active > a, +.nav-tabs-justified > .active > a:hover, +.nav-tabs-justified > .active > a:focus { + border: 1px solid #ddd; +} +@media (min-width: 768px) { + .nav-tabs-justified > li > a { + border-bottom: 1px solid #ddd; + border-radius: 4px 4px 0 0; + } + .nav-tabs-justified > .active > a, + .nav-tabs-justified > .active > a:hover, + .nav-tabs-justified > .active > a:focus { + border-bottom-color: #fff; + } +} +.tab-content > .tab-pane { + display: none; +} +.tab-content > .active { + display: block; +} +.nav-tabs .dropdown-menu { + margin-top: -1px; + border-top-left-radius: 0; + border-top-right-radius: 0; +} +.navbar { + position: relative; + min-height: 50px; + margin-bottom: 20px; + border: 1px solid transparent; +} +@media (min-width: 768px) { + .navbar { + border-radius: 4px; + } +} +@media (min-width: 768px) { + .navbar-header { + float: left; + } +} +.navbar-collapse { + padding-right: 15px; + padding-left: 15px; + overflow-x: visible; + 
-webkit-overflow-scrolling: touch; + border-top: 1px solid transparent; + -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1); +} +.navbar-collapse.in { + overflow-y: auto; +} +@media (min-width: 768px) { + .navbar-collapse { + width: auto; + border-top: 0; + -webkit-box-shadow: none; + box-shadow: none; + } + .navbar-collapse.collapse { + display: block !important; + height: auto !important; + padding-bottom: 0; + overflow: visible !important; + } + .navbar-collapse.in { + overflow-y: visible; + } + .navbar-fixed-top .navbar-collapse, + .navbar-static-top .navbar-collapse, + .navbar-fixed-bottom .navbar-collapse { + padding-right: 0; + padding-left: 0; + } +} +.navbar-fixed-top .navbar-collapse, +.navbar-fixed-bottom .navbar-collapse { + max-height: 340px; +} +@media (max-device-width: 480px) and (orientation: landscape) { + .navbar-fixed-top .navbar-collapse, + .navbar-fixed-bottom .navbar-collapse { + max-height: 200px; + } +} +.container > .navbar-header, +.container-fluid > .navbar-header, +.container > .navbar-collapse, +.container-fluid > .navbar-collapse { + margin-right: -15px; + margin-left: -15px; +} +@media (min-width: 768px) { + .container > .navbar-header, + .container-fluid > .navbar-header, + .container > .navbar-collapse, + .container-fluid > .navbar-collapse { + margin-right: 0; + margin-left: 0; + } +} +.navbar-static-top { + z-index: 1000; + border-width: 0 0 1px; +} +@media (min-width: 768px) { + .navbar-static-top { + border-radius: 0; + } +} +.navbar-fixed-top, +.navbar-fixed-bottom { + position: fixed; + right: 0; + left: 0; + z-index: 1030; +} +@media (min-width: 768px) { + .navbar-fixed-top, + .navbar-fixed-bottom { + border-radius: 0; + } +} +.navbar-fixed-top { + top: 0; + border-width: 0 0 1px; +} +.navbar-fixed-bottom { + bottom: 0; + margin-bottom: 0; + border-width: 1px 0 0; +} +.navbar-brand { + float: left; + height: 50px; + padding: 15px 15px; + font-size: 18px; + 
line-height: 20px; +} +.navbar-brand:hover, +.navbar-brand:focus { + text-decoration: none; +} +.navbar-brand > img { + display: block; +} +@media (min-width: 768px) { + .navbar > .container .navbar-brand, + .navbar > .container-fluid .navbar-brand { + margin-left: -15px; + } +} +.navbar-toggle { + position: relative; + float: right; + padding: 9px 10px; + margin-top: 8px; + margin-right: 15px; + margin-bottom: 8px; + background-color: transparent; + background-image: none; + border: 1px solid transparent; + border-radius: 4px; +} +.navbar-toggle:focus { + outline: 0; +} +.navbar-toggle .icon-bar { + display: block; + width: 22px; + height: 2px; + border-radius: 1px; +} +.navbar-toggle .icon-bar + .icon-bar { + margin-top: 4px; +} +@media (min-width: 768px) { + .navbar-toggle { + display: none; + } +} +.navbar-nav { + margin: 7.5px -15px; +} +.navbar-nav > li > a { + padding-top: 10px; + padding-bottom: 10px; + line-height: 20px; +} +@media (max-width: 767px) { + .navbar-nav .open .dropdown-menu { + position: static; + float: none; + width: auto; + margin-top: 0; + background-color: transparent; + border: 0; + -webkit-box-shadow: none; + box-shadow: none; + } + .navbar-nav .open .dropdown-menu > li > a, + .navbar-nav .open .dropdown-menu .dropdown-header { + padding: 5px 15px 5px 25px; + } + .navbar-nav .open .dropdown-menu > li > a { + line-height: 20px; + } + .navbar-nav .open .dropdown-menu > li > a:hover, + .navbar-nav .open .dropdown-menu > li > a:focus { + background-image: none; + } +} +@media (min-width: 768px) { + .navbar-nav { + float: left; + margin: 0; + } + .navbar-nav > li { + float: left; + } + .navbar-nav > li > a { + padding-top: 15px; + padding-bottom: 15px; + } +} +.navbar-form { + padding: 10px 15px; + margin-top: 8px; + margin-right: -15px; + margin-bottom: 8px; + margin-left: -15px; + border-top: 1px solid transparent; + border-bottom: 1px solid transparent; + -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1), 0 1px 0 rgba(255, 255, 
255, .1); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1), 0 1px 0 rgba(255, 255, 255, .1); +} +@media (min-width: 768px) { + .navbar-form .form-group { + display: inline-block; + margin-bottom: 0; + vertical-align: middle; + } + .navbar-form .form-control { + display: inline-block; + width: auto; + vertical-align: middle; + } + .navbar-form .form-control-static { + display: inline-block; + } + .navbar-form .input-group { + display: inline-table; + vertical-align: middle; + } + .navbar-form .input-group .input-group-addon, + .navbar-form .input-group .input-group-btn, + .navbar-form .input-group .form-control { + width: auto; + } + .navbar-form .input-group > .form-control { + width: 100%; + } + .navbar-form .control-label { + margin-bottom: 0; + vertical-align: middle; + } + .navbar-form .radio, + .navbar-form .checkbox { + display: inline-block; + margin-top: 0; + margin-bottom: 0; + vertical-align: middle; + } + .navbar-form .radio label, + .navbar-form .checkbox label { + padding-left: 0; + } + .navbar-form .radio input[type="radio"], + .navbar-form .checkbox input[type="checkbox"] { + position: relative; + margin-left: 0; + } + .navbar-form .has-feedback .form-control-feedback { + top: 0; + } +} +@media (max-width: 767px) { + .navbar-form .form-group { + margin-bottom: 5px; + } + .navbar-form .form-group:last-child { + margin-bottom: 0; + } +} +@media (min-width: 768px) { + .navbar-form { + width: auto; + padding-top: 0; + padding-bottom: 0; + margin-right: 0; + margin-left: 0; + border: 0; + -webkit-box-shadow: none; + box-shadow: none; + } +} +.navbar-nav > li > .dropdown-menu { + margin-top: 0; + border-top-left-radius: 0; + border-top-right-radius: 0; +} +.navbar-fixed-bottom .navbar-nav > li > .dropdown-menu { + margin-bottom: 0; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + border-bottom-right-radius: 0; + border-bottom-left-radius: 0; +} +.navbar-btn { + margin-top: 8px; + margin-bottom: 8px; +} +.navbar-btn.btn-sm { + margin-top: 
10px; + margin-bottom: 10px; +} +.navbar-btn.btn-xs { + margin-top: 14px; + margin-bottom: 14px; +} +.navbar-text { + margin-top: 15px; + margin-bottom: 15px; +} +@media (min-width: 768px) { + .navbar-text { + float: left; + margin-right: 15px; + margin-left: 15px; + } +} +@media (min-width: 768px) { + .navbar-left { + float: left !important; + } + .navbar-right { + float: right !important; + margin-right: -15px; + } + .navbar-right ~ .navbar-right { + margin-right: 0; + } +} +.navbar-default { + background-color: #f8f8f8; + border-color: #e7e7e7; +} +.navbar-default .navbar-brand { + color: #777; +} +.navbar-default .navbar-brand:hover, +.navbar-default .navbar-brand:focus { + color: #5e5e5e; + background-color: transparent; +} +.navbar-default .navbar-text { + color: #777; +} +.navbar-default .navbar-nav > li > a { + color: #777; +} +.navbar-default .navbar-nav > li > a:hover, +.navbar-default .navbar-nav > li > a:focus { + color: #333; + background-color: transparent; +} +.navbar-default .navbar-nav > .active > a, +.navbar-default .navbar-nav > .active > a:hover, +.navbar-default .navbar-nav > .active > a:focus { + color: #555; + background-color: #e7e7e7; +} +.navbar-default .navbar-nav > .disabled > a, +.navbar-default .navbar-nav > .disabled > a:hover, +.navbar-default .navbar-nav > .disabled > a:focus { + color: #ccc; + background-color: transparent; +} +.navbar-default .navbar-toggle { + border-color: #ddd; +} +.navbar-default .navbar-toggle:hover, +.navbar-default .navbar-toggle:focus { + background-color: #ddd; +} +.navbar-default .navbar-toggle .icon-bar { + background-color: #888; +} +.navbar-default .navbar-collapse, +.navbar-default .navbar-form { + border-color: #e7e7e7; +} +.navbar-default .navbar-nav > .open > a, +.navbar-default .navbar-nav > .open > a:hover, +.navbar-default .navbar-nav > .open > a:focus { + color: #555; + background-color: #e7e7e7; +} +@media (max-width: 767px) { + .navbar-default .navbar-nav .open .dropdown-menu > li > a { + 
color: #777; + } + .navbar-default .navbar-nav .open .dropdown-menu > li > a:hover, + .navbar-default .navbar-nav .open .dropdown-menu > li > a:focus { + color: #333; + background-color: transparent; + } + .navbar-default .navbar-nav .open .dropdown-menu > .active > a, + .navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover, + .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { + color: #555; + background-color: #e7e7e7; + } + .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a, + .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:hover, + .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:focus { + color: #ccc; + background-color: transparent; + } +} +.navbar-default .navbar-link { + color: #777; +} +.navbar-default .navbar-link:hover { + color: #333; +} +.navbar-default .btn-link { + color: #777; +} +.navbar-default .btn-link:hover, +.navbar-default .btn-link:focus { + color: #333; +} +.navbar-default .btn-link[disabled]:hover, +fieldset[disabled] .navbar-default .btn-link:hover, +.navbar-default .btn-link[disabled]:focus, +fieldset[disabled] .navbar-default .btn-link:focus { + color: #ccc; +} +.navbar-inverse { + background-color: #222; + border-color: #080808; +} +.navbar-inverse .navbar-brand { + color: #9d9d9d; +} +.navbar-inverse .navbar-brand:hover, +.navbar-inverse .navbar-brand:focus { + color: #fff; + background-color: transparent; +} +.navbar-inverse .navbar-text { + color: #9d9d9d; +} +.navbar-inverse .navbar-nav > li > a { + color: #9d9d9d; +} +.navbar-inverse .navbar-nav > li > a:hover, +.navbar-inverse .navbar-nav > li > a:focus { + color: #fff; + background-color: transparent; +} +.navbar-inverse .navbar-nav > .active > a, +.navbar-inverse .navbar-nav > .active > a:hover, +.navbar-inverse .navbar-nav > .active > a:focus { + color: #fff; + background-color: #080808; +} +.navbar-inverse .navbar-nav > .disabled > a, +.navbar-inverse .navbar-nav > .disabled > a:hover, 
+.navbar-inverse .navbar-nav > .disabled > a:focus { + color: #444; + background-color: transparent; +} +.navbar-inverse .navbar-toggle { + border-color: #333; +} +.navbar-inverse .navbar-toggle:hover, +.navbar-inverse .navbar-toggle:focus { + background-color: #333; +} +.navbar-inverse .navbar-toggle .icon-bar { + background-color: #fff; +} +.navbar-inverse .navbar-collapse, +.navbar-inverse .navbar-form { + border-color: #101010; +} +.navbar-inverse .navbar-nav > .open > a, +.navbar-inverse .navbar-nav > .open > a:hover, +.navbar-inverse .navbar-nav > .open > a:focus { + color: #fff; + background-color: #080808; +} +@media (max-width: 767px) { + .navbar-inverse .navbar-nav .open .dropdown-menu > .dropdown-header { + border-color: #080808; + } + .navbar-inverse .navbar-nav .open .dropdown-menu .divider { + background-color: #080808; + } + .navbar-inverse .navbar-nav .open .dropdown-menu > li > a { + color: #9d9d9d; + } + .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:hover, + .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:focus { + color: #fff; + background-color: transparent; + } + .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a, + .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:hover, + .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:focus { + color: #fff; + background-color: #080808; + } + .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a, + .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:hover, + .navbar-inverse .navbar-nav .open .dropdown-menu > .disabled > a:focus { + color: #444; + background-color: transparent; + } +} +.navbar-inverse .navbar-link { + color: #9d9d9d; +} +.navbar-inverse .navbar-link:hover { + color: #fff; +} +.navbar-inverse .btn-link { + color: #9d9d9d; +} +.navbar-inverse .btn-link:hover, +.navbar-inverse .btn-link:focus { + color: #fff; +} +.navbar-inverse .btn-link[disabled]:hover, +fieldset[disabled] .navbar-inverse .btn-link:hover, 
+.navbar-inverse .btn-link[disabled]:focus, +fieldset[disabled] .navbar-inverse .btn-link:focus { + color: #444; +} +.breadcrumb { + padding: 8px 15px; + margin-bottom: 20px; + list-style: none; + background-color: #f5f5f5; + border-radius: 4px; +} +.breadcrumb > li { + display: inline-block; +} +.breadcrumb > li + li:before { + padding: 0 5px; + color: #ccc; + content: "/\00a0"; +} +.breadcrumb > .active { + color: #777; +} +.pagination { + display: inline-block; + padding-left: 0; + margin: 20px 0; + border-radius: 4px; +} +.pagination > li { + display: inline; +} +.pagination > li > a, +.pagination > li > span { + position: relative; + float: left; + padding: 6px 12px; + margin-left: -1px; + line-height: 1.42857143; + color: #337ab7; + text-decoration: none; + background-color: #fff; + border: 1px solid #ddd; +} +.pagination > li:first-child > a, +.pagination > li:first-child > span { + margin-left: 0; + border-top-left-radius: 4px; + border-bottom-left-radius: 4px; +} +.pagination > li:last-child > a, +.pagination > li:last-child > span { + border-top-right-radius: 4px; + border-bottom-right-radius: 4px; +} +.pagination > li > a:hover, +.pagination > li > span:hover, +.pagination > li > a:focus, +.pagination > li > span:focus { + z-index: 2; + color: #23527c; + background-color: #eee; + border-color: #ddd; +} +.pagination > .active > a, +.pagination > .active > span, +.pagination > .active > a:hover, +.pagination > .active > span:hover, +.pagination > .active > a:focus, +.pagination > .active > span:focus { + z-index: 3; + color: #fff; + cursor: default; + background-color: #337ab7; + border-color: #337ab7; +} +.pagination > .disabled > span, +.pagination > .disabled > span:hover, +.pagination > .disabled > span:focus, +.pagination > .disabled > a, +.pagination > .disabled > a:hover, +.pagination > .disabled > a:focus { + color: #777; + cursor: not-allowed; + background-color: #fff; + border-color: #ddd; +} +.pagination-lg > li > a, +.pagination-lg > li > span 
{ + padding: 10px 16px; + font-size: 18px; + line-height: 1.3333333; +} +.pagination-lg > li:first-child > a, +.pagination-lg > li:first-child > span { + border-top-left-radius: 6px; + border-bottom-left-radius: 6px; +} +.pagination-lg > li:last-child > a, +.pagination-lg > li:last-child > span { + border-top-right-radius: 6px; + border-bottom-right-radius: 6px; +} +.pagination-sm > li > a, +.pagination-sm > li > span { + padding: 5px 10px; + font-size: 12px; + line-height: 1.5; +} +.pagination-sm > li:first-child > a, +.pagination-sm > li:first-child > span { + border-top-left-radius: 3px; + border-bottom-left-radius: 3px; +} +.pagination-sm > li:last-child > a, +.pagination-sm > li:last-child > span { + border-top-right-radius: 3px; + border-bottom-right-radius: 3px; +} +.pager { + padding-left: 0; + margin: 20px 0; + text-align: center; + list-style: none; +} +.pager li { + display: inline; +} +.pager li > a, +.pager li > span { + display: inline-block; + padding: 5px 14px; + background-color: #fff; + border: 1px solid #ddd; + border-radius: 15px; +} +.pager li > a:hover, +.pager li > a:focus { + text-decoration: none; + background-color: #eee; +} +.pager .next > a, +.pager .next > span { + float: right; +} +.pager .previous > a, +.pager .previous > span { + float: left; +} +.pager .disabled > a, +.pager .disabled > a:hover, +.pager .disabled > a:focus, +.pager .disabled > span { + color: #777; + cursor: not-allowed; + background-color: #fff; +} +.label { + display: inline; + padding: .2em .6em .3em; + font-size: 75%; + font-weight: bold; + line-height: 1; + color: #fff; + text-align: center; + white-space: nowrap; + vertical-align: baseline; + border-radius: .25em; +} +a.label:hover, +a.label:focus { + color: #fff; + text-decoration: none; + cursor: pointer; +} +.label:empty { + display: none; +} +.btn .label { + position: relative; + top: -1px; +} +.label-default { + background-color: #777; +} +.label-default[href]:hover, +.label-default[href]:focus { + 
background-color: #5e5e5e; +} +.label-primary { + background-color: #337ab7; +} +.label-primary[href]:hover, +.label-primary[href]:focus { + background-color: #286090; +} +.label-success { + background-color: #5cb85c; +} +.label-success[href]:hover, +.label-success[href]:focus { + background-color: #449d44; +} +.label-info { + background-color: #5bc0de; +} +.label-info[href]:hover, +.label-info[href]:focus { + background-color: #31b0d5; +} +.label-warning { + background-color: #f0ad4e; +} +.label-warning[href]:hover, +.label-warning[href]:focus { + background-color: #ec971f; +} +.label-danger { + background-color: #d9534f; +} +.label-danger[href]:hover, +.label-danger[href]:focus { + background-color: #c9302c; +} +.badge { + display: inline-block; + min-width: 10px; + padding: 3px 7px; + font-size: 12px; + font-weight: bold; + line-height: 1; + color: #fff; + text-align: center; + white-space: nowrap; + vertical-align: middle; + background-color: #777; + border-radius: 10px; +} +.badge:empty { + display: none; +} +.btn .badge { + position: relative; + top: -1px; +} +.btn-xs .badge, +.btn-group-xs > .btn .badge { + top: 0; + padding: 1px 5px; +} +a.badge:hover, +a.badge:focus { + color: #fff; + text-decoration: none; + cursor: pointer; +} +.list-group-item.active > .badge, +.nav-pills > .active > a > .badge { + color: #337ab7; + background-color: #fff; +} +.list-group-item > .badge { + float: right; +} +.list-group-item > .badge + .badge { + margin-right: 5px; +} +.nav-pills > li > a > .badge { + margin-left: 3px; +} +.jumbotron { + padding-top: 30px; + padding-bottom: 30px; + margin-bottom: 30px; + color: inherit; + background-color: #eee; +} +.jumbotron h1, +.jumbotron .h1 { + color: inherit; +} +.jumbotron p { + margin-bottom: 15px; + font-size: 21px; + font-weight: 200; +} +.jumbotron > hr { + border-top-color: #d5d5d5; +} +.container .jumbotron, +.container-fluid .jumbotron { + padding-right: 15px; + padding-left: 15px; + border-radius: 6px; +} +.jumbotron 
.container { + max-width: 100%; +} +@media screen and (min-width: 768px) { + .jumbotron { + padding-top: 48px; + padding-bottom: 48px; + } + .container .jumbotron, + .container-fluid .jumbotron { + padding-right: 60px; + padding-left: 60px; + } + .jumbotron h1, + .jumbotron .h1 { + font-size: 63px; + } +} +.thumbnail { + display: block; + padding: 4px; + margin-bottom: 20px; + line-height: 1.42857143; + background-color: #fff; + border: 1px solid #ddd; + border-radius: 4px; + -webkit-transition: border .2s ease-in-out; + -o-transition: border .2s ease-in-out; + transition: border .2s ease-in-out; +} +.thumbnail > img, +.thumbnail a > img { + margin-right: auto; + margin-left: auto; +} +a.thumbnail:hover, +a.thumbnail:focus, +a.thumbnail.active { + border-color: #337ab7; +} +.thumbnail .caption { + padding: 9px; + color: #333; +} +.alert { + padding: 15px; + margin-bottom: 20px; + border: 1px solid transparent; + border-radius: 4px; +} +.alert h4 { + margin-top: 0; + color: inherit; +} +.alert .alert-link { + font-weight: bold; +} +.alert > p, +.alert > ul { + margin-bottom: 0; +} +.alert > p + p { + margin-top: 5px; +} +.alert-dismissable, +.alert-dismissible { + padding-right: 35px; +} +.alert-dismissable .close, +.alert-dismissible .close { + position: relative; + top: -2px; + right: -21px; + color: inherit; +} +.alert-success { + color: #3c763d; + background-color: #dff0d8; + border-color: #d6e9c6; +} +.alert-success hr { + border-top-color: #c9e2b3; +} +.alert-success .alert-link { + color: #2b542c; +} +.alert-info { + color: #31708f; + background-color: #d9edf7; + border-color: #bce8f1; +} +.alert-info hr { + border-top-color: #a6e1ec; +} +.alert-info .alert-link { + color: #245269; +} +.alert-warning { + color: #8a6d3b; + background-color: #fcf8e3; + border-color: #faebcc; +} +.alert-warning hr { + border-top-color: #f7e1b5; +} +.alert-warning .alert-link { + color: #66512c; +} +.alert-danger { + color: #a94442; + background-color: #f2dede; + border-color: 
#ebccd1; +} +.alert-danger hr { + border-top-color: #e4b9c0; +} +.alert-danger .alert-link { + color: #843534; +} +@-webkit-keyframes progress-bar-stripes { + from { + background-position: 40px 0; + } + to { + background-position: 0 0; + } +} +@-o-keyframes progress-bar-stripes { + from { + background-position: 40px 0; + } + to { + background-position: 0 0; + } +} +@keyframes progress-bar-stripes { + from { + background-position: 40px 0; + } + to { + background-position: 0 0; + } +} +.progress { + height: 20px; + margin-bottom: 20px; + overflow: hidden; + background-color: #f5f5f5; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, .1); + box-shadow: inset 0 1px 2px rgba(0, 0, 0, .1); +} +.progress-bar { + float: left; + width: 0; + height: 100%; + font-size: 12px; + line-height: 20px; + color: #fff; + text-align: center; + background-color: #337ab7; + -webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, .15); + box-shadow: inset 0 -1px 0 rgba(0, 0, 0, .15); + -webkit-transition: width .6s ease; + -o-transition: width .6s ease; + transition: width .6s ease; +} +.progress-striped .progress-bar, +.progress-bar-striped { + background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + -webkit-background-size: 40px 40px; + background-size: 40px 40px; +} +.progress.active .progress-bar, +.progress-bar.active { + -webkit-animation: progress-bar-stripes 2s linear infinite; + -o-animation: progress-bar-stripes 2s linear 
infinite; + animation: progress-bar-stripes 2s linear infinite; +} +.progress-bar-success { + background-color: #5cb85c; +} +.progress-striped .progress-bar-success { + background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); +} +.progress-bar-info { + background-color: #5bc0de; +} +.progress-striped .progress-bar-info { + background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); +} +.progress-bar-warning { + background-color: #f0ad4e; +} +.progress-striped .progress-bar-warning { + background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + 
background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); +} +.progress-bar-danger { + background-color: #d9534f; +} +.progress-striped .progress-bar-danger { + background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); + background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent); +} +.media { + margin-top: 15px; +} +.media:first-child { + margin-top: 0; +} +.media, +.media-body { + overflow: hidden; + zoom: 1; +} +.media-body { + width: 10000px; +} +.media-object { + display: block; +} +.media-object.img-thumbnail { + max-width: none; +} +.media-right, +.media > .pull-right { + padding-left: 10px; +} +.media-left, +.media > .pull-left { + padding-right: 10px; +} +.media-left, +.media-right, +.media-body { + display: table-cell; + vertical-align: top; +} +.media-middle { + vertical-align: middle; +} +.media-bottom { + vertical-align: bottom; +} +.media-heading { + margin-top: 0; + margin-bottom: 5px; +} +.media-list { + padding-left: 0; + list-style: none; +} +.list-group { + padding-left: 0; + margin-bottom: 20px; +} +.list-group-item { + position: relative; + display: block; + padding: 10px 15px; + margin-bottom: -1px; + background-color: #fff; + border: 1px solid #ddd; +} +.list-group-item:first-child { + border-top-left-radius: 4px; + border-top-right-radius: 4px; +} +.list-group-item:last-child { + margin-bottom: 0; + border-bottom-right-radius: 4px; + 
border-bottom-left-radius: 4px; +} +a.list-group-item, +button.list-group-item { + color: #555; +} +a.list-group-item .list-group-item-heading, +button.list-group-item .list-group-item-heading { + color: #333; +} +a.list-group-item:hover, +button.list-group-item:hover, +a.list-group-item:focus, +button.list-group-item:focus { + color: #555; + text-decoration: none; + background-color: #f5f5f5; +} +button.list-group-item { + width: 100%; + text-align: left; +} +.list-group-item.disabled, +.list-group-item.disabled:hover, +.list-group-item.disabled:focus { + color: #777; + cursor: not-allowed; + background-color: #eee; +} +.list-group-item.disabled .list-group-item-heading, +.list-group-item.disabled:hover .list-group-item-heading, +.list-group-item.disabled:focus .list-group-item-heading { + color: inherit; +} +.list-group-item.disabled .list-group-item-text, +.list-group-item.disabled:hover .list-group-item-text, +.list-group-item.disabled:focus .list-group-item-text { + color: #777; +} +.list-group-item.active, +.list-group-item.active:hover, +.list-group-item.active:focus { + z-index: 2; + color: #fff; + background-color: #337ab7; + border-color: #337ab7; +} +.list-group-item.active .list-group-item-heading, +.list-group-item.active:hover .list-group-item-heading, +.list-group-item.active:focus .list-group-item-heading, +.list-group-item.active .list-group-item-heading > small, +.list-group-item.active:hover .list-group-item-heading > small, +.list-group-item.active:focus .list-group-item-heading > small, +.list-group-item.active .list-group-item-heading > .small, +.list-group-item.active:hover .list-group-item-heading > .small, +.list-group-item.active:focus .list-group-item-heading > .small { + color: inherit; +} +.list-group-item.active .list-group-item-text, +.list-group-item.active:hover .list-group-item-text, +.list-group-item.active:focus .list-group-item-text { + color: #c7ddef; +} +.list-group-item-success { + color: #3c763d; + background-color: #dff0d8; 
+} +a.list-group-item-success, +button.list-group-item-success { + color: #3c763d; +} +a.list-group-item-success .list-group-item-heading, +button.list-group-item-success .list-group-item-heading { + color: inherit; +} +a.list-group-item-success:hover, +button.list-group-item-success:hover, +a.list-group-item-success:focus, +button.list-group-item-success:focus { + color: #3c763d; + background-color: #d0e9c6; +} +a.list-group-item-success.active, +button.list-group-item-success.active, +a.list-group-item-success.active:hover, +button.list-group-item-success.active:hover, +a.list-group-item-success.active:focus, +button.list-group-item-success.active:focus { + color: #fff; + background-color: #3c763d; + border-color: #3c763d; +} +.list-group-item-info { + color: #31708f; + background-color: #d9edf7; +} +a.list-group-item-info, +button.list-group-item-info { + color: #31708f; +} +a.list-group-item-info .list-group-item-heading, +button.list-group-item-info .list-group-item-heading { + color: inherit; +} +a.list-group-item-info:hover, +button.list-group-item-info:hover, +a.list-group-item-info:focus, +button.list-group-item-info:focus { + color: #31708f; + background-color: #c4e3f3; +} +a.list-group-item-info.active, +button.list-group-item-info.active, +a.list-group-item-info.active:hover, +button.list-group-item-info.active:hover, +a.list-group-item-info.active:focus, +button.list-group-item-info.active:focus { + color: #fff; + background-color: #31708f; + border-color: #31708f; +} +.list-group-item-warning { + color: #8a6d3b; + background-color: #fcf8e3; +} +a.list-group-item-warning, +button.list-group-item-warning { + color: #8a6d3b; +} +a.list-group-item-warning .list-group-item-heading, +button.list-group-item-warning .list-group-item-heading { + color: inherit; +} +a.list-group-item-warning:hover, +button.list-group-item-warning:hover, +a.list-group-item-warning:focus, +button.list-group-item-warning:focus { + color: #8a6d3b; + background-color: #faf2cc; +} 
+a.list-group-item-warning.active, +button.list-group-item-warning.active, +a.list-group-item-warning.active:hover, +button.list-group-item-warning.active:hover, +a.list-group-item-warning.active:focus, +button.list-group-item-warning.active:focus { + color: #fff; + background-color: #8a6d3b; + border-color: #8a6d3b; +} +.list-group-item-danger { + color: #a94442; + background-color: #f2dede; +} +a.list-group-item-danger, +button.list-group-item-danger { + color: #a94442; +} +a.list-group-item-danger .list-group-item-heading, +button.list-group-item-danger .list-group-item-heading { + color: inherit; +} +a.list-group-item-danger:hover, +button.list-group-item-danger:hover, +a.list-group-item-danger:focus, +button.list-group-item-danger:focus { + color: #a94442; + background-color: #ebcccc; +} +a.list-group-item-danger.active, +button.list-group-item-danger.active, +a.list-group-item-danger.active:hover, +button.list-group-item-danger.active:hover, +a.list-group-item-danger.active:focus, +button.list-group-item-danger.active:focus { + color: #fff; + background-color: #a94442; + border-color: #a94442; +} +.list-group-item-heading { + margin-top: 0; + margin-bottom: 5px; +} +.list-group-item-text { + margin-bottom: 0; + line-height: 1.3; +} +.panel { + margin-bottom: 20px; + background-color: #fff; + border: 1px solid transparent; + border-radius: 4px; + -webkit-box-shadow: 0 1px 1px rgba(0, 0, 0, .05); + box-shadow: 0 1px 1px rgba(0, 0, 0, .05); +} +.panel-body { + padding: 15px; +} +.panel-heading { + padding: 10px 15px; + border-bottom: 1px solid transparent; + border-top-left-radius: 3px; + border-top-right-radius: 3px; +} +.panel-heading > .dropdown .dropdown-toggle { + color: inherit; +} +.panel-title { + margin-top: 0; + margin-bottom: 0; + font-size: 16px; + color: inherit; +} +.panel-title > a, +.panel-title > small, +.panel-title > .small, +.panel-title > small > a, +.panel-title > .small > a { + color: inherit; +} +.panel-footer { + padding: 10px 15px; + 
background-color: #f5f5f5; + border-top: 1px solid #ddd; + border-bottom-right-radius: 3px; + border-bottom-left-radius: 3px; +} +.panel > .list-group, +.panel > .panel-collapse > .list-group { + margin-bottom: 0; +} +.panel > .list-group .list-group-item, +.panel > .panel-collapse > .list-group .list-group-item { + border-width: 1px 0; + border-radius: 0; +} +.panel > .list-group:first-child .list-group-item:first-child, +.panel > .panel-collapse > .list-group:first-child .list-group-item:first-child { + border-top: 0; + border-top-left-radius: 3px; + border-top-right-radius: 3px; +} +.panel > .list-group:last-child .list-group-item:last-child, +.panel > .panel-collapse > .list-group:last-child .list-group-item:last-child { + border-bottom: 0; + border-bottom-right-radius: 3px; + border-bottom-left-radius: 3px; +} +.panel > .panel-heading + .panel-collapse > .list-group .list-group-item:first-child { + border-top-left-radius: 0; + border-top-right-radius: 0; +} +.panel-heading + .list-group .list-group-item:first-child { + border-top-width: 0; +} +.list-group + .panel-footer { + border-top-width: 0; +} +.panel > .table, +.panel > .table-responsive > .table, +.panel > .panel-collapse > .table { + margin-bottom: 0; +} +.panel > .table caption, +.panel > .table-responsive > .table caption, +.panel > .panel-collapse > .table caption { + padding-right: 15px; + padding-left: 15px; +} +.panel > .table:first-child, +.panel > .table-responsive:first-child > .table:first-child { + border-top-left-radius: 3px; + border-top-right-radius: 3px; +} +.panel > .table:first-child > thead:first-child > tr:first-child, +.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child, +.panel > .table:first-child > tbody:first-child > tr:first-child, +.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child { + border-top-left-radius: 3px; + border-top-right-radius: 3px; +} +.panel > .table:first-child > 
thead:first-child > tr:first-child td:first-child, +.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child td:first-child, +.panel > .table:first-child > tbody:first-child > tr:first-child td:first-child, +.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child td:first-child, +.panel > .table:first-child > thead:first-child > tr:first-child th:first-child, +.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child th:first-child, +.panel > .table:first-child > tbody:first-child > tr:first-child th:first-child, +.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child th:first-child { + border-top-left-radius: 3px; +} +.panel > .table:first-child > thead:first-child > tr:first-child td:last-child, +.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child td:last-child, +.panel > .table:first-child > tbody:first-child > tr:first-child td:last-child, +.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child td:last-child, +.panel > .table:first-child > thead:first-child > tr:first-child th:last-child, +.panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child th:last-child, +.panel > .table:first-child > tbody:first-child > tr:first-child th:last-child, +.panel > .table-responsive:first-child > .table:first-child > tbody:first-child > tr:first-child th:last-child { + border-top-right-radius: 3px; +} +.panel > .table:last-child, +.panel > .table-responsive:last-child > .table:last-child { + border-bottom-right-radius: 3px; + border-bottom-left-radius: 3px; +} +.panel > .table:last-child > tbody:last-child > tr:last-child, +.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child, +.panel > .table:last-child > tfoot:last-child > tr:last-child, +.panel > 
.table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child { + border-bottom-right-radius: 3px; + border-bottom-left-radius: 3px; +} +.panel > .table:last-child > tbody:last-child > tr:last-child td:first-child, +.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child td:first-child, +.panel > .table:last-child > tfoot:last-child > tr:last-child td:first-child, +.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child td:first-child, +.panel > .table:last-child > tbody:last-child > tr:last-child th:first-child, +.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child th:first-child, +.panel > .table:last-child > tfoot:last-child > tr:last-child th:first-child, +.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child th:first-child { + border-bottom-left-radius: 3px; +} +.panel > .table:last-child > tbody:last-child > tr:last-child td:last-child, +.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child td:last-child, +.panel > .table:last-child > tfoot:last-child > tr:last-child td:last-child, +.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child td:last-child, +.panel > .table:last-child > tbody:last-child > tr:last-child th:last-child, +.panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child th:last-child, +.panel > .table:last-child > tfoot:last-child > tr:last-child th:last-child, +.panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child th:last-child { + border-bottom-right-radius: 3px; +} +.panel > .panel-body + .table, +.panel > .panel-body + .table-responsive, +.panel > .table + .panel-body, +.panel > .table-responsive + .panel-body { + border-top: 1px solid #ddd; +} +.panel > .table > tbody:first-child > tr:first-child th, +.panel > .table > 
tbody:first-child > tr:first-child td { + border-top: 0; +} +.panel > .table-bordered, +.panel > .table-responsive > .table-bordered { + border: 0; +} +.panel > .table-bordered > thead > tr > th:first-child, +.panel > .table-responsive > .table-bordered > thead > tr > th:first-child, +.panel > .table-bordered > tbody > tr > th:first-child, +.panel > .table-responsive > .table-bordered > tbody > tr > th:first-child, +.panel > .table-bordered > tfoot > tr > th:first-child, +.panel > .table-responsive > .table-bordered > tfoot > tr > th:first-child, +.panel > .table-bordered > thead > tr > td:first-child, +.panel > .table-responsive > .table-bordered > thead > tr > td:first-child, +.panel > .table-bordered > tbody > tr > td:first-child, +.panel > .table-responsive > .table-bordered > tbody > tr > td:first-child, +.panel > .table-bordered > tfoot > tr > td:first-child, +.panel > .table-responsive > .table-bordered > tfoot > tr > td:first-child { + border-left: 0; +} +.panel > .table-bordered > thead > tr > th:last-child, +.panel > .table-responsive > .table-bordered > thead > tr > th:last-child, +.panel > .table-bordered > tbody > tr > th:last-child, +.panel > .table-responsive > .table-bordered > tbody > tr > th:last-child, +.panel > .table-bordered > tfoot > tr > th:last-child, +.panel > .table-responsive > .table-bordered > tfoot > tr > th:last-child, +.panel > .table-bordered > thead > tr > td:last-child, +.panel > .table-responsive > .table-bordered > thead > tr > td:last-child, +.panel > .table-bordered > tbody > tr > td:last-child, +.panel > .table-responsive > .table-bordered > tbody > tr > td:last-child, +.panel > .table-bordered > tfoot > tr > td:last-child, +.panel > .table-responsive > .table-bordered > tfoot > tr > td:last-child { + border-right: 0; +} +.panel > .table-bordered > thead > tr:first-child > td, +.panel > .table-responsive > .table-bordered > thead > tr:first-child > td, +.panel > .table-bordered > tbody > tr:first-child > td, +.panel > 
.table-responsive > .table-bordered > tbody > tr:first-child > td, +.panel > .table-bordered > thead > tr:first-child > th, +.panel > .table-responsive > .table-bordered > thead > tr:first-child > th, +.panel > .table-bordered > tbody > tr:first-child > th, +.panel > .table-responsive > .table-bordered > tbody > tr:first-child > th { + border-bottom: 0; +} +.panel > .table-bordered > tbody > tr:last-child > td, +.panel > .table-responsive > .table-bordered > tbody > tr:last-child > td, +.panel > .table-bordered > tfoot > tr:last-child > td, +.panel > .table-responsive > .table-bordered > tfoot > tr:last-child > td, +.panel > .table-bordered > tbody > tr:last-child > th, +.panel > .table-responsive > .table-bordered > tbody > tr:last-child > th, +.panel > .table-bordered > tfoot > tr:last-child > th, +.panel > .table-responsive > .table-bordered > tfoot > tr:last-child > th { + border-bottom: 0; +} +.panel > .table-responsive { + margin-bottom: 0; + border: 0; +} +.panel-group { + margin-bottom: 20px; +} +.panel-group .panel { + margin-bottom: 0; + border-radius: 4px; +} +.panel-group .panel + .panel { + margin-top: 5px; +} +.panel-group .panel-heading { + border-bottom: 0; +} +.panel-group .panel-heading + .panel-collapse > .panel-body, +.panel-group .panel-heading + .panel-collapse > .list-group { + border-top: 1px solid #ddd; +} +.panel-group .panel-footer { + border-top: 0; +} +.panel-group .panel-footer + .panel-collapse .panel-body { + border-bottom: 1px solid #ddd; +} +.panel-default { + border-color: #ddd; +} +.panel-default > .panel-heading { + color: #333; + background-color: #f5f5f5; + border-color: #ddd; +} +.panel-default > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #ddd; +} +.panel-default > .panel-heading .badge { + color: #f5f5f5; + background-color: #333; +} +.panel-default > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #ddd; +} +.panel-primary { + border-color: #337ab7; +} +.panel-primary > 
.panel-heading { + color: #fff; + background-color: #337ab7; + border-color: #337ab7; +} +.panel-primary > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #337ab7; +} +.panel-primary > .panel-heading .badge { + color: #337ab7; + background-color: #fff; +} +.panel-primary > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #337ab7; +} +.panel-success { + border-color: #d6e9c6; +} +.panel-success > .panel-heading { + color: #3c763d; + background-color: #dff0d8; + border-color: #d6e9c6; +} +.panel-success > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #d6e9c6; +} +.panel-success > .panel-heading .badge { + color: #dff0d8; + background-color: #3c763d; +} +.panel-success > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #d6e9c6; +} +.panel-info { + border-color: #bce8f1; +} +.panel-info > .panel-heading { + color: #31708f; + background-color: #d9edf7; + border-color: #bce8f1; +} +.panel-info > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #bce8f1; +} +.panel-info > .panel-heading .badge { + color: #d9edf7; + background-color: #31708f; +} +.panel-info > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #bce8f1; +} +.panel-warning { + border-color: #faebcc; +} +.panel-warning > .panel-heading { + color: #8a6d3b; + background-color: #fcf8e3; + border-color: #faebcc; +} +.panel-warning > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #faebcc; +} +.panel-warning > .panel-heading .badge { + color: #fcf8e3; + background-color: #8a6d3b; +} +.panel-warning > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #faebcc; +} +.panel-danger { + border-color: #ebccd1; +} +.panel-danger > .panel-heading { + color: #a94442; + background-color: #f2dede; + border-color: #ebccd1; +} +.panel-danger > .panel-heading + .panel-collapse > .panel-body { + border-top-color: #ebccd1; +} +.panel-danger > .panel-heading .badge { 
+ color: #f2dede; + background-color: #a94442; +} +.panel-danger > .panel-footer + .panel-collapse > .panel-body { + border-bottom-color: #ebccd1; +} +.embed-responsive { + position: relative; + display: block; + height: 0; + padding: 0; + overflow: hidden; +} +.embed-responsive .embed-responsive-item, +.embed-responsive iframe, +.embed-responsive embed, +.embed-responsive object, +.embed-responsive video { + position: absolute; + top: 0; + bottom: 0; + left: 0; + width: 100%; + height: 100%; + border: 0; +} +.embed-responsive-16by9 { + padding-bottom: 56.25%; +} +.embed-responsive-4by3 { + padding-bottom: 75%; +} +.well { + min-height: 20px; + padding: 19px; + margin-bottom: 20px; + background-color: #f5f5f5; + border: 1px solid #e3e3e3; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05); +} +.well blockquote { + border-color: #ddd; + border-color: rgba(0, 0, 0, .15); +} +.well-lg { + padding: 24px; + border-radius: 6px; +} +.well-sm { + padding: 9px; + border-radius: 3px; +} +.close { + float: right; + font-size: 21px; + font-weight: bold; + line-height: 1; + color: #000; + text-shadow: 0 1px 0 #fff; + filter: alpha(opacity=20); + opacity: .2; +} +.close:hover, +.close:focus { + color: #000; + text-decoration: none; + cursor: pointer; + filter: alpha(opacity=50); + opacity: .5; +} +button.close { + -webkit-appearance: none; + padding: 0; + cursor: pointer; + background: transparent; + border: 0; +} +.modal-open { + overflow: hidden; +} +.modal { + position: fixed; + top: 0; + right: 0; + bottom: 0; + left: 0; + z-index: 1050; + display: none; + overflow: hidden; + -webkit-overflow-scrolling: touch; + outline: 0; +} +.modal.fade .modal-dialog { + -webkit-transition: -webkit-transform .3s ease-out; + -o-transition: -o-transform .3s ease-out; + transition: transform .3s ease-out; + -webkit-transform: translate(0, -25%); + -ms-transform: translate(0, -25%); + -o-transform: translate(0, 
-25%); + transform: translate(0, -25%); +} +.modal.in .modal-dialog { + -webkit-transform: translate(0, 0); + -ms-transform: translate(0, 0); + -o-transform: translate(0, 0); + transform: translate(0, 0); +} +.modal-open .modal { + overflow-x: hidden; + overflow-y: auto; +} +.modal-dialog { + position: relative; + width: auto; + margin: 10px; +} +.modal-content { + position: relative; + background-color: #fff; + -webkit-background-clip: padding-box; + background-clip: padding-box; + border: 1px solid #999; + border: 1px solid rgba(0, 0, 0, .2); + border-radius: 6px; + outline: 0; + -webkit-box-shadow: 0 3px 9px rgba(0, 0, 0, .5); + box-shadow: 0 3px 9px rgba(0, 0, 0, .5); +} +.modal-backdrop { + position: fixed; + top: 0; + right: 0; + bottom: 0; + left: 0; + z-index: 1040; + background-color: #000; +} +.modal-backdrop.fade { + filter: alpha(opacity=0); + opacity: 0; +} +.modal-backdrop.in { + filter: alpha(opacity=50); + opacity: .5; +} +.modal-header { + padding: 15px; + border-bottom: 1px solid #e5e5e5; +} +.modal-header .close { + margin-top: -2px; +} +.modal-title { + margin: 0; + line-height: 1.42857143; +} +.modal-body { + position: relative; + padding: 15px; +} +.modal-footer { + padding: 15px; + text-align: right; + border-top: 1px solid #e5e5e5; +} +.modal-footer .btn + .btn { + margin-bottom: 0; + margin-left: 5px; +} +.modal-footer .btn-group .btn + .btn { + margin-left: -1px; +} +.modal-footer .btn-block + .btn-block { + margin-left: 0; +} +.modal-scrollbar-measure { + position: absolute; + top: -9999px; + width: 50px; + height: 50px; + overflow: scroll; +} +@media (min-width: 768px) { + .modal-dialog { + width: 600px; + margin: 30px auto; + } + .modal-content { + -webkit-box-shadow: 0 5px 15px rgba(0, 0, 0, .5); + box-shadow: 0 5px 15px rgba(0, 0, 0, .5); + } + .modal-sm { + width: 300px; + } +} +@media (min-width: 992px) { + .modal-lg { + width: 900px; + } +} +.tooltip { + position: absolute; + z-index: 1070; + display: block; + font-family: 
"Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 12px; + font-style: normal; + font-weight: normal; + line-height: 1.42857143; + text-align: left; + text-align: start; + text-decoration: none; + text-shadow: none; + text-transform: none; + letter-spacing: normal; + word-break: normal; + word-spacing: normal; + word-wrap: normal; + white-space: normal; + filter: alpha(opacity=0); + opacity: 0; + + line-break: auto; +} +.tooltip.in { + filter: alpha(opacity=90); + opacity: .9; +} +.tooltip.top { + padding: 5px 0; + margin-top: -3px; +} +.tooltip.right { + padding: 0 5px; + margin-left: 3px; +} +.tooltip.bottom { + padding: 5px 0; + margin-top: 3px; +} +.tooltip.left { + padding: 0 5px; + margin-left: -3px; +} +.tooltip-inner { + max-width: 200px; + padding: 3px 8px; + color: #fff; + text-align: center; + background-color: #000; + border-radius: 4px; +} +.tooltip-arrow { + position: absolute; + width: 0; + height: 0; + border-color: transparent; + border-style: solid; +} +.tooltip.top .tooltip-arrow { + bottom: 0; + left: 50%; + margin-left: -5px; + border-width: 5px 5px 0; + border-top-color: #000; +} +.tooltip.top-left .tooltip-arrow { + right: 5px; + bottom: 0; + margin-bottom: -5px; + border-width: 5px 5px 0; + border-top-color: #000; +} +.tooltip.top-right .tooltip-arrow { + bottom: 0; + left: 5px; + margin-bottom: -5px; + border-width: 5px 5px 0; + border-top-color: #000; +} +.tooltip.right .tooltip-arrow { + top: 50%; + left: 0; + margin-top: -5px; + border-width: 5px 5px 5px 0; + border-right-color: #000; +} +.tooltip.left .tooltip-arrow { + top: 50%; + right: 0; + margin-top: -5px; + border-width: 5px 0 5px 5px; + border-left-color: #000; +} +.tooltip.bottom .tooltip-arrow { + top: 0; + left: 50%; + margin-left: -5px; + border-width: 0 5px 5px; + border-bottom-color: #000; +} +.tooltip.bottom-left .tooltip-arrow { + top: 0; + right: 5px; + margin-top: -5px; + border-width: 0 5px 5px; + border-bottom-color: #000; +} +.tooltip.bottom-right 
.tooltip-arrow { + top: 0; + left: 5px; + margin-top: -5px; + border-width: 0 5px 5px; + border-bottom-color: #000; +} +.popover { + position: absolute; + top: 0; + left: 0; + z-index: 1060; + display: none; + max-width: 276px; + padding: 1px; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 14px; + font-style: normal; + font-weight: normal; + line-height: 1.42857143; + text-align: left; + text-align: start; + text-decoration: none; + text-shadow: none; + text-transform: none; + letter-spacing: normal; + word-break: normal; + word-spacing: normal; + word-wrap: normal; + white-space: normal; + background-color: #fff; + -webkit-background-clip: padding-box; + background-clip: padding-box; + border: 1px solid #ccc; + border: 1px solid rgba(0, 0, 0, .2); + border-radius: 6px; + -webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, .2); + box-shadow: 0 5px 10px rgba(0, 0, 0, .2); + + line-break: auto; +} +.popover.top { + margin-top: -10px; +} +.popover.right { + margin-left: 10px; +} +.popover.bottom { + margin-top: 10px; +} +.popover.left { + margin-left: -10px; +} +.popover-title { + padding: 8px 14px; + margin: 0; + font-size: 14px; + background-color: #f7f7f7; + border-bottom: 1px solid #ebebeb; + border-radius: 5px 5px 0 0; +} +.popover-content { + padding: 9px 14px; +} +.popover > .arrow, +.popover > .arrow:after { + position: absolute; + display: block; + width: 0; + height: 0; + border-color: transparent; + border-style: solid; +} +.popover > .arrow { + border-width: 11px; +} +.popover > .arrow:after { + content: ""; + border-width: 10px; +} +.popover.top > .arrow { + bottom: -11px; + left: 50%; + margin-left: -11px; + border-top-color: #999; + border-top-color: rgba(0, 0, 0, .25); + border-bottom-width: 0; +} +.popover.top > .arrow:after { + bottom: 1px; + margin-left: -10px; + content: " "; + border-top-color: #fff; + border-bottom-width: 0; +} +.popover.right > .arrow { + top: 50%; + left: -11px; + margin-top: -11px; + border-right-color: 
#999; + border-right-color: rgba(0, 0, 0, .25); + border-left-width: 0; +} +.popover.right > .arrow:after { + bottom: -10px; + left: 1px; + content: " "; + border-right-color: #fff; + border-left-width: 0; +} +.popover.bottom > .arrow { + top: -11px; + left: 50%; + margin-left: -11px; + border-top-width: 0; + border-bottom-color: #999; + border-bottom-color: rgba(0, 0, 0, .25); +} +.popover.bottom > .arrow:after { + top: 1px; + margin-left: -10px; + content: " "; + border-top-width: 0; + border-bottom-color: #fff; +} +.popover.left > .arrow { + top: 50%; + right: -11px; + margin-top: -11px; + border-right-width: 0; + border-left-color: #999; + border-left-color: rgba(0, 0, 0, .25); +} +.popover.left > .arrow:after { + right: 1px; + bottom: -10px; + content: " "; + border-right-width: 0; + border-left-color: #fff; +} +.carousel { + position: relative; +} +.carousel-inner { + position: relative; + width: 100%; + overflow: hidden; +} +.carousel-inner > .item { + position: relative; + display: none; + -webkit-transition: .6s ease-in-out left; + -o-transition: .6s ease-in-out left; + transition: .6s ease-in-out left; +} +.carousel-inner > .item > img, +.carousel-inner > .item > a > img { + line-height: 1; +} +@media all and (transform-3d), (-webkit-transform-3d) { + .carousel-inner > .item { + -webkit-transition: -webkit-transform .6s ease-in-out; + -o-transition: -o-transform .6s ease-in-out; + transition: transform .6s ease-in-out; + + -webkit-backface-visibility: hidden; + backface-visibility: hidden; + -webkit-perspective: 1000px; + perspective: 1000px; + } + .carousel-inner > .item.next, + .carousel-inner > .item.active.right { + left: 0; + -webkit-transform: translate3d(100%, 0, 0); + transform: translate3d(100%, 0, 0); + } + .carousel-inner > .item.prev, + .carousel-inner > .item.active.left { + left: 0; + -webkit-transform: translate3d(-100%, 0, 0); + transform: translate3d(-100%, 0, 0); + } + .carousel-inner > .item.next.left, + .carousel-inner > 
.item.prev.right, + .carousel-inner > .item.active { + left: 0; + -webkit-transform: translate3d(0, 0, 0); + transform: translate3d(0, 0, 0); + } +} +.carousel-inner > .active, +.carousel-inner > .next, +.carousel-inner > .prev { + display: block; +} +.carousel-inner > .active { + left: 0; +} +.carousel-inner > .next, +.carousel-inner > .prev { + position: absolute; + top: 0; + width: 100%; +} +.carousel-inner > .next { + left: 100%; +} +.carousel-inner > .prev { + left: -100%; +} +.carousel-inner > .next.left, +.carousel-inner > .prev.right { + left: 0; +} +.carousel-inner > .active.left { + left: -100%; +} +.carousel-inner > .active.right { + left: 100%; +} +.carousel-control { + position: absolute; + top: 0; + bottom: 0; + left: 0; + width: 15%; + font-size: 20px; + color: #fff; + text-align: center; + text-shadow: 0 1px 2px rgba(0, 0, 0, .6); + background-color: rgba(0, 0, 0, 0); + filter: alpha(opacity=50); + opacity: .5; +} +.carousel-control.left { + background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, .5) 0%, rgba(0, 0, 0, .0001) 100%); + background-image: -o-linear-gradient(left, rgba(0, 0, 0, .5) 0%, rgba(0, 0, 0, .0001) 100%); + background-image: -webkit-gradient(linear, left top, right top, from(rgba(0, 0, 0, .5)), to(rgba(0, 0, 0, .0001))); + background-image: linear-gradient(to right, rgba(0, 0, 0, .5) 0%, rgba(0, 0, 0, .0001) 100%); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1); + background-repeat: repeat-x; +} +.carousel-control.right { + right: 0; + left: auto; + background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, .0001) 0%, rgba(0, 0, 0, .5) 100%); + background-image: -o-linear-gradient(left, rgba(0, 0, 0, .0001) 0%, rgba(0, 0, 0, .5) 100%); + background-image: -webkit-gradient(linear, left top, right top, from(rgba(0, 0, 0, .0001)), to(rgba(0, 0, 0, .5))); + background-image: linear-gradient(to right, rgba(0, 0, 0, .0001) 0%, rgba(0, 0, 0, .5) 100%); + 
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1); + background-repeat: repeat-x; +} +.carousel-control:hover, +.carousel-control:focus { + color: #fff; + text-decoration: none; + filter: alpha(opacity=90); + outline: 0; + opacity: .9; +} +.carousel-control .icon-prev, +.carousel-control .icon-next, +.carousel-control .glyphicon-chevron-left, +.carousel-control .glyphicon-chevron-right { + position: absolute; + top: 50%; + z-index: 5; + display: inline-block; + margin-top: -10px; +} +.carousel-control .icon-prev, +.carousel-control .glyphicon-chevron-left { + left: 50%; + margin-left: -10px; +} +.carousel-control .icon-next, +.carousel-control .glyphicon-chevron-right { + right: 50%; + margin-right: -10px; +} +.carousel-control .icon-prev, +.carousel-control .icon-next { + width: 20px; + height: 20px; + font-family: serif; + line-height: 1; +} +.carousel-control .icon-prev:before { + content: '\2039'; +} +.carousel-control .icon-next:before { + content: '\203a'; +} +.carousel-indicators { + position: absolute; + bottom: 10px; + left: 50%; + z-index: 15; + width: 60%; + padding-left: 0; + margin-left: -30%; + text-align: center; + list-style: none; +} +.carousel-indicators li { + display: inline-block; + width: 10px; + height: 10px; + margin: 1px; + text-indent: -999px; + cursor: pointer; + background-color: #000 \9; + background-color: rgba(0, 0, 0, 0); + border: 1px solid #fff; + border-radius: 10px; +} +.carousel-indicators .active { + width: 12px; + height: 12px; + margin: 0; + background-color: #fff; +} +.carousel-caption { + position: absolute; + right: 15%; + bottom: 20px; + left: 15%; + z-index: 10; + padding-top: 20px; + padding-bottom: 20px; + color: #fff; + text-align: center; + text-shadow: 0 1px 2px rgba(0, 0, 0, .6); +} +.carousel-caption .btn { + text-shadow: none; +} +@media screen and (min-width: 768px) { + .carousel-control .glyphicon-chevron-left, + .carousel-control 
.glyphicon-chevron-right, + .carousel-control .icon-prev, + .carousel-control .icon-next { + width: 30px; + height: 30px; + margin-top: -10px; + font-size: 30px; + } + .carousel-control .glyphicon-chevron-left, + .carousel-control .icon-prev { + margin-left: -10px; + } + .carousel-control .glyphicon-chevron-right, + .carousel-control .icon-next { + margin-right: -10px; + } + .carousel-caption { + right: 20%; + left: 20%; + padding-bottom: 30px; + } + .carousel-indicators { + bottom: 20px; + } +} +.clearfix:before, +.clearfix:after, +.dl-horizontal dd:before, +.dl-horizontal dd:after, +.container:before, +.container:after, +.container-fluid:before, +.container-fluid:after, +.row:before, +.row:after, +.form-horizontal .form-group:before, +.form-horizontal .form-group:after, +.btn-toolbar:before, +.btn-toolbar:after, +.btn-group-vertical > .btn-group:before, +.btn-group-vertical > .btn-group:after, +.nav:before, +.nav:after, +.navbar:before, +.navbar:after, +.navbar-header:before, +.navbar-header:after, +.navbar-collapse:before, +.navbar-collapse:after, +.pager:before, +.pager:after, +.panel-body:before, +.panel-body:after, +.modal-header:before, +.modal-header:after, +.modal-footer:before, +.modal-footer:after { + display: table; + content: " "; +} +.clearfix:after, +.dl-horizontal dd:after, +.container:after, +.container-fluid:after, +.row:after, +.form-horizontal .form-group:after, +.btn-toolbar:after, +.btn-group-vertical > .btn-group:after, +.nav:after, +.navbar:after, +.navbar-header:after, +.navbar-collapse:after, +.pager:after, +.panel-body:after, +.modal-header:after, +.modal-footer:after { + clear: both; +} +.center-block { + display: block; + margin-right: auto; + margin-left: auto; +} +.pull-right { + float: right !important; +} +.pull-left { + float: left !important; +} +.hide { + display: none !important; +} +.show { + display: block !important; +} +.invisible { + visibility: hidden; +} +.text-hide { + font: 0/0 a; + color: transparent; + text-shadow: 
none; + background-color: transparent; + border: 0; +} +.hidden { + display: none !important; +} +.affix { + position: fixed; +} +@-ms-viewport { + width: device-width; +} +.visible-xs, +.visible-sm, +.visible-md, +.visible-lg { + display: none !important; +} +.visible-xs-block, +.visible-xs-inline, +.visible-xs-inline-block, +.visible-sm-block, +.visible-sm-inline, +.visible-sm-inline-block, +.visible-md-block, +.visible-md-inline, +.visible-md-inline-block, +.visible-lg-block, +.visible-lg-inline, +.visible-lg-inline-block { + display: none !important; +} +@media (max-width: 767px) { + .visible-xs { + display: block !important; + } + table.visible-xs { + display: table !important; + } + tr.visible-xs { + display: table-row !important; + } + th.visible-xs, + td.visible-xs { + display: table-cell !important; + } +} +@media (max-width: 767px) { + .visible-xs-block { + display: block !important; + } +} +@media (max-width: 767px) { + .visible-xs-inline { + display: inline !important; + } +} +@media (max-width: 767px) { + .visible-xs-inline-block { + display: inline-block !important; + } +} +@media (min-width: 768px) and (max-width: 991px) { + .visible-sm { + display: block !important; + } + table.visible-sm { + display: table !important; + } + tr.visible-sm { + display: table-row !important; + } + th.visible-sm, + td.visible-sm { + display: table-cell !important; + } +} +@media (min-width: 768px) and (max-width: 991px) { + .visible-sm-block { + display: block !important; + } +} +@media (min-width: 768px) and (max-width: 991px) { + .visible-sm-inline { + display: inline !important; + } +} +@media (min-width: 768px) and (max-width: 991px) { + .visible-sm-inline-block { + display: inline-block !important; + } +} +@media (min-width: 992px) and (max-width: 1199px) { + .visible-md { + display: block !important; + } + table.visible-md { + display: table !important; + } + tr.visible-md { + display: table-row !important; + } + th.visible-md, + td.visible-md { + display: 
table-cell !important; + } +} +@media (min-width: 992px) and (max-width: 1199px) { + .visible-md-block { + display: block !important; + } +} +@media (min-width: 992px) and (max-width: 1199px) { + .visible-md-inline { + display: inline !important; + } +} +@media (min-width: 992px) and (max-width: 1199px) { + .visible-md-inline-block { + display: inline-block !important; + } +} +@media (min-width: 1200px) { + .visible-lg { + display: block !important; + } + table.visible-lg { + display: table !important; + } + tr.visible-lg { + display: table-row !important; + } + th.visible-lg, + td.visible-lg { + display: table-cell !important; + } +} +@media (min-width: 1200px) { + .visible-lg-block { + display: block !important; + } +} +@media (min-width: 1200px) { + .visible-lg-inline { + display: inline !important; + } +} +@media (min-width: 1200px) { + .visible-lg-inline-block { + display: inline-block !important; + } +} +@media (max-width: 767px) { + .hidden-xs { + display: none !important; + } +} +@media (min-width: 768px) and (max-width: 991px) { + .hidden-sm { + display: none !important; + } +} +@media (min-width: 992px) and (max-width: 1199px) { + .hidden-md { + display: none !important; + } +} +@media (min-width: 1200px) { + .hidden-lg { + display: none !important; + } +} +.visible-print { + display: none !important; +} +@media print { + .visible-print { + display: block !important; + } + table.visible-print { + display: table !important; + } + tr.visible-print { + display: table-row !important; + } + th.visible-print, + td.visible-print { + display: table-cell !important; + } +} +.visible-print-block { + display: none !important; +} +@media print { + .visible-print-block { + display: block !important; + } +} +.visible-print-inline { + display: none !important; +} +@media print { + .visible-print-inline { + display: inline !important; + } +} +.visible-print-inline-block { + display: none !important; +} +@media print { + .visible-print-inline-block { + display: 
inline-block !important; + } +} +@media print { + .hidden-print { + display: none !important; + } +} +/*# sourceMappingURL=bootstrap.css.map */ diff --git a/css/bootstrap.min.css b/css/bootstrap.min.css new file mode 100644 index 0000000..82113bd --- /dev/null +++ b/css/bootstrap.min.css @@ -0,0 +1,6 @@ +/*! + * Bootstrap v3.3.7 (http://getbootstrap.com) + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) 
format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{c
ontent:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-rig
ht:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left
:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:b
efore{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content
:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:before{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-b
ox;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 
small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul 
ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.666
66667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media 
(min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>
tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg 
input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:3;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager 
.previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover 
.list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child 
td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-respon
sive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open 
.modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 
5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 
14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right 
top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000\9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.modal-header:after,.modal-header:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal .form-group:after,.modal-footer:after,.modal-header:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media 
(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and 
(max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} +/*# sourceMappingURL=bootstrap.min.css.map */ diff --git a/css/nivo-lightbox/close.png b/css/nivo-lightbox/close.png new file mode 100644 index 0000000..e802c69 Binary files /dev/null and b/css/nivo-lightbox/close.png differ diff --git a/css/nivo-lightbox/close@2x.png b/css/nivo-lightbox/close@2x.png new file mode 100644 index 0000000..60b37fa Binary files /dev/null and b/css/nivo-lightbox/close@2x.png differ diff --git a/css/nivo-lightbox/default.css b/css/nivo-lightbox/default.css new file mode 100644 index 0000000..41fbb58 --- /dev/null +++ b/css/nivo-lightbox/default.css @@ -0,0 +1,103 @@ +/* + * Nivo Lightbox Default Theme v1.0 + * http://dev7studios.com/nivo-lightbox + * + * Copyright 2013, Dev7studios + * Free to use and abuse under the MIT license. 
+ * http://www.opensource.org/licenses/mit-license.php + */ + + .nivo-lightbox-theme-default * { + -webkit-box-sizing: initial !important; + -moz-box-sizing: initial !important; + box-sizing: initial !important; + } +.nivo-lightbox-theme-default.nivo-lightbox-overlay { + background: #666; + background: rgba(0,0,0,0.6); +} +.nivo-lightbox-theme-default .nivo-lightbox-content.nivo-lightbox-loading { background: url(loading.gif) no-repeat 50% 50%; } + +.nivo-lightbox-theme-default .nivo-lightbox-nav { + top: 10%; + width: 8%; + height: 80%; + text-indent: -9999px; + background-repeat: no-repeat; + background-position: 50% 50%; + opacity: 0.5; +} +.nivo-lightbox-theme-default .nivo-lightbox-nav:hover { + opacity: 1; + background-color: transparent; +} +.nivo-lightbox-theme-default .nivo-lightbox-prev { + background-image: url(prev.png); + border-radius: 0 3px 3px 0; +} +.nivo-lightbox-theme-default .nivo-lightbox-next { + background-image: url(next.png); + border-radius: 3px 0 0 3px; +} + +.nivo-lightbox-theme-default .nivo-lightbox-close { + display: block; + background: url(close.png) no-repeat 5px 5px; + width: 16px; + height: 16px; + text-indent: -9999px; + padding: 5px; + opacity: 0.5; +} +.nivo-lightbox-theme-default .nivo-lightbox-close:hover { opacity: 1; } + +.nivo-lightbox-theme-default .nivo-lightbox-title-wrap { bottom: -7%; } +.nivo-lightbox-theme-default .nivo-lightbox-title { + font: 14px/20px 'Helvetica Neue', Helvetica, Arial, sans-serif; + font-style: normal; + font-weight: normal; + background: #000; + color: #fff; + padding: 7px 15px; + border-radius: 30px; +} + +.nivo-lightbox-theme-default .nivo-lightbox-image img { + background: #fff; + -webkit-box-shadow: 0px 1px 1px rgba(0,0,0,0.4); + -moz-box-shadow: 0px 1px 1px rgba(0,0,0,0.4); + box-shadow: 0px 1px 1px rgba(0,0,0,0.4); +} +.nivo-lightbox-theme-default .nivo-lightbox-ajax, +.nivo-lightbox-theme-default .nivo-lightbox-inline { + background: #fff; + padding: 40px; + -webkit-box-shadow: 0px 1px 
1px rgba(0,0,0,0.4); + -moz-box-shadow: 0px 1px 1px rgba(0,0,0,0.4); + box-shadow: 0px 1px 1px rgba(0,0,0,0.4); +} + +@media (-webkit-min-device-pixel-ratio: 1.3), + (-o-min-device-pixel-ratio: 2.6/2), + (min--moz-device-pixel-ratio: 1.3), + (min-device-pixel-ratio: 1.3), + (min-resolution: 1.3dppx) { + + .nivo-lightbox-theme-default .nivo-lightbox-content.nivo-lightbox-loading { + background-image: url(loading@2x.gif); + background-size: 32px 32px; + } + .nivo-lightbox-theme-default .nivo-lightbox-prev { + background-image: url(prev@2x.png); + background-size: 48px 48px; + } + .nivo-lightbox-theme-default .nivo-lightbox-next { + background-image: url(next@2x.png); + background-size: 48px 48px; + } + .nivo-lightbox-theme-default .nivo-lightbox-close { + background-image: url(close@2x.png); + background-size: 16px 16px; + } + +} diff --git a/css/nivo-lightbox/loading.gif b/css/nivo-lightbox/loading.gif new file mode 100644 index 0000000..5b796c4 Binary files /dev/null and b/css/nivo-lightbox/loading.gif differ diff --git a/css/nivo-lightbox/loading@2x.gif b/css/nivo-lightbox/loading@2x.gif new file mode 100644 index 0000000..dc8aeff Binary files /dev/null and b/css/nivo-lightbox/loading@2x.gif differ diff --git a/css/nivo-lightbox/next.png b/css/nivo-lightbox/next.png new file mode 100644 index 0000000..462b64e Binary files /dev/null and b/css/nivo-lightbox/next.png differ diff --git a/css/nivo-lightbox/next@2x.png b/css/nivo-lightbox/next@2x.png new file mode 100644 index 0000000..f246ae9 Binary files /dev/null and b/css/nivo-lightbox/next@2x.png differ diff --git a/css/nivo-lightbox/nivo-lightbox.css b/css/nivo-lightbox/nivo-lightbox.css new file mode 100644 index 0000000..f46886f --- /dev/null +++ b/css/nivo-lightbox/nivo-lightbox.css @@ -0,0 +1,205 @@ +/* + * Nivo Lightbox v1.2.0 + * http://dev7studios.com/nivo-lightbox + * + * Copyright 2013, Dev7studios + * Free to use and abuse under the MIT license. 
+ * http://www.opensource.org/licenses/mit-license.php + */ + +.nivo-lightbox-overlay { + position: fixed; + top: 0; + left: 0; + z-index: 99998; + width: 100%; + height: 100%; + overflow: hidden; + visibility: hidden; + opacity: 0; + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} +.nivo-lightbox-overlay.nivo-lightbox-open { + visibility: visible; + opacity: 1; +} +.nivo-lightbox-wrap { + position: absolute; + top: 10%; + bottom: 10%; + left: 10%; + right: 10%; +} +.nivo-lightbox-content { + width: 100%; + height: 100%; +} +.nivo-lightbox-title-wrap { + position: absolute; + bottom: 0; + left: 0; + width: 100%; + z-index: 99999; + text-align: center; +} +.nivo-lightbox-nav { display: none; } +.nivo-lightbox-prev { + position: absolute; + top: 50%; + left: 0; +} +.nivo-lightbox-next { + position: absolute; + top: 50%; + right: 0; +} +.nivo-lightbox-close { + position: absolute; + top: 2%; + right: 2%; +} + +.nivo-lightbox-image { text-align: center; } +.nivo-lightbox-image img { + max-width: 100%; + max-height: 100%; + width: auto; + height: auto; + vertical-align: middle; +} +.nivo-lightbox-content iframe { + width: 100%; + height: 100%; +} +.nivo-lightbox-inline, +.nivo-lightbox-ajax { + max-height: 100%; + overflow: auto; + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + /* https://bugzilla.mozilla.org/show_bug.cgi?id=308801 */ +} +.nivo-lightbox-error { + display: table; + text-align: center; + width: 100%; + height: 100%; + color: #fff; + text-shadow: 0 1px 1px #000; +} +.nivo-lightbox-error p { + display: table-cell; + vertical-align: middle; +} + +/* Effects + **********************************************/ +.nivo-lightbox-notouch .nivo-lightbox-effect-fade, +.nivo-lightbox-notouch .nivo-lightbox-effect-fadeScale, +.nivo-lightbox-notouch .nivo-lightbox-effect-slideLeft, +.nivo-lightbox-notouch .nivo-lightbox-effect-slideRight, +.nivo-lightbox-notouch 
.nivo-lightbox-effect-slideUp, +.nivo-lightbox-notouch .nivo-lightbox-effect-slideDown, +.nivo-lightbox-notouch .nivo-lightbox-effect-fall { + -webkit-transition: all 0.2s ease-in-out; + -moz-transition: all 0.2s ease-in-out; + -ms-transition: all 0.2s ease-in-out; + -o-transition: all 0.2s ease-in-out; + transition: all 0.2s ease-in-out; +} + +/* fadeScale */ +.nivo-lightbox-effect-fadeScale .nivo-lightbox-wrap { + -webkit-transition: all 0.3s; + -moz-transition: all 0.3s; + -ms-transition: all 0.3s; + -o-transition: all 0.3s; + transition: all 0.3s; + -webkit-transform: scale(0.7); + -moz-transform: scale(0.7); + -ms-transform: scale(0.7); + transform: scale(0.7); +} +.nivo-lightbox-effect-fadeScale.nivo-lightbox-open .nivo-lightbox-wrap { + -webkit-transform: scale(1); + -moz-transform: scale(1); + -ms-transform: scale(1); + transform: scale(1); +} + +/* slideLeft / slideRight / slideUp / slideDown */ +.nivo-lightbox-effect-slideLeft .nivo-lightbox-wrap, +.nivo-lightbox-effect-slideRight .nivo-lightbox-wrap, +.nivo-lightbox-effect-slideUp .nivo-lightbox-wrap, +.nivo-lightbox-effect-slideDown .nivo-lightbox-wrap { + -webkit-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); + -moz-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); + -ms-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); + -o-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); + transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); +} +.nivo-lightbox-effect-slideLeft .nivo-lightbox-wrap { + -webkit-transform: translateX(-10%); + -moz-transform: translateX(-10%); + -ms-transform: translateX(-10%); + transform: translateX(-10%); +} +.nivo-lightbox-effect-slideRight .nivo-lightbox-wrap { + -webkit-transform: translateX(10%); + -moz-transform: translateX(10%); + -ms-transform: translateX(10%); + transform: translateX(10%); +} +.nivo-lightbox-effect-slideLeft.nivo-lightbox-open .nivo-lightbox-wrap, +.nivo-lightbox-effect-slideRight.nivo-lightbox-open .nivo-lightbox-wrap { + 
-webkit-transform: translateX(0); + -moz-transform: translateX(0); + -ms-transform: translateX(0); + transform: translateX(0); +} +.nivo-lightbox-effect-slideDown .nivo-lightbox-wrap { + -webkit-transform: translateY(-10%); + -moz-transform: translateY(-10%); + -ms-transform: translateY(-10%); + transform: translateY(-10%); +} +.nivo-lightbox-effect-slideUp .nivo-lightbox-wrap { + -webkit-transform: translateY(10%); + -moz-transform: translateY(10%); + -ms-transform: translateY(10%); + transform: translateY(10%); +} +.nivo-lightbox-effect-slideUp.nivo-lightbox-open .nivo-lightbox-wrap, +.nivo-lightbox-effect-slideDown.nivo-lightbox-open .nivo-lightbox-wrap { + -webkit-transform: translateY(0); + -moz-transform: translateY(0); + -ms-transform: translateY(0); + transform: translateY(0); +} + +/* fall */ +.nivo-lightbox-body-effect-fall .nivo-lightbox-effect-fall { + -webkit-perspective: 1000px; + -moz-perspective: 1000px; + perspective: 1000px; +} +.nivo-lightbox-effect-fall .nivo-lightbox-wrap { + -webkit-transition: all 0.3s ease-out; + -moz-transition: all 0.3s ease-out; + -ms-transition: all 0.3s ease-out; + -o-transition: all 0.3s ease-out; + transition: all 0.3s ease-out; + -webkit-transform: translateZ(300px); + -moz-transform: translateZ(300px); + -ms-transform: translateZ(300px); + transform: translateZ(300px); +} +.nivo-lightbox-effect-fall.nivo-lightbox-open .nivo-lightbox-wrap { + -webkit-transform: translateZ(0); + -moz-transform: translateZ(0); + -ms-transform: translateZ(0); + transform: translateZ(0); +} diff --git a/css/nivo-lightbox/prev.png b/css/nivo-lightbox/prev.png new file mode 100644 index 0000000..eda62a7 Binary files /dev/null and b/css/nivo-lightbox/prev.png differ diff --git a/css/nivo-lightbox/prev@2x.png b/css/nivo-lightbox/prev@2x.png new file mode 100644 index 0000000..49a2e4d Binary files /dev/null and b/css/nivo-lightbox/prev@2x.png differ diff --git a/css/style.css b/css/style.css new file mode 100644 index 0000000..d08af92 --- 
/dev/null +++ b/css/style.css @@ -0,0 +1,623 @@ +body, +html { + font-family: "Open Sans", sans-serif; + text-rendering: optimizeLegibility !important; + -webkit-font-smoothing: antialiased !important; + color: #777; + font-weight: 400; + width: 100% !important; + height: 100% !important; +} +h2, +h3, +h4 { + font-family: "Raleway", sans-serif; +} +logo { + object-fit: fill; +} +h2 { + text-transform: uppercase; + margin: 0 0 20px; + font-weight: 800; + font-size: 36px; + color: #333; +} +h3 { + font-size: 20px; + font-weight: 600; + color: #333; +} +h4 { + font-size: 18px; + color: #333; + font-weight: 600; +} +h5 { + text-transform: uppercase; + font-weight: 700; + line-height: 20px; +} +p { + font-size: 15px; +} +p.intro { + margin: 12px 0 0; + line-height: 24px; +} +a { + color: #608dfd; + font-weight: 400; +} +a:hover, +a:focus { + text-decoration: none; + color: #608dfd; +} +ul, +ol { + list-style: none; +} +ul, +ol { + padding: 0; + webkit-padding: 0; + moz-padding: 0; +} +hr { + height: 2px; + width: 70px; + text-align: center; + position: relative; + background: #1e7a46; + margin-bottom: 20px; + border: 0; +} +/* Navigation */ +#menu { + padding: 15px; + transition: all 0.8s; +} +#menu.navbar-default { + /*background-color: #fff;*/ /* prev */ + background-color: black; + border-color: rgba(231, 231, 231, 0); + box-shadow: 0 0 10px rgba(0, 0, 0, 0.15); +} +#menu a.navbar-brand { + font-family: "Raleway", sans-serif; + font-weight: 700; + /*color: #333;*/ /* prev */ + color: #fff; + text-transform: uppercase; +} +#menu.navbar-default .navbar-nav > li > a { + font-family: "Lato", sans-serif; + text-transform: uppercase; + /*color: #555;*/ /* prev */ + color: #fff; + font-size: 15px; + font-weight: 400; + padding: 8px 2px; + border-radius: 0; + margin: 9px 20px 0; +} +#menu.navbar-default .navbar-nav > li > a:after { + display: block; + position: absolute; + left: 0; + bottom: -1px; + width: 0; + height: 2px; + background: linear-gradient(to right, #6372ff 0%, 
#5ca9fb 100%); + content: ""; + transition: width 0.2s; +} +#menu.navbar-default .navbar-nav > li > a:hover:after { + width: 100%; +} +.navbar-default .navbar-nav > .active > a, +.navbar-default .navbar-nav > .active > a:hover, +.navbar-default .navbar-nav > .active > a:focus { + background-color: transparent; +} +.navbar-default .navbar-nav > .active > a:after, +.navbar-default .navbar-nav > .active > a:hover:after, +.navbar-default .navbar-nav > .active > a:focus:after { + display: block !important; + position: absolute !important; + left: 0 !important; + bottom: -1px !important; + width: 100% !important; + height: 2px !important; + background: linear-gradient(to right, #6372ff 0%, #5ca9fb 100%) !important; + content: "" !important; + transition: width 0.2s !important; +} +.navbar-toggle { + border-radius: 0; +} +.navbar-default .navbar-toggle:hover, +.navbar-default .navbar-toggle:focus { + background-color: #fff; + border-color: #608dfd; +} +.navbar-default .navbar-toggle:hover > .icon-bar { + background-color: #608dfd; +} +.section-title { + margin-bottom: 70px; +} +.section-title h2 { + position: relative; + margin-top: 10px; + margin-bottom: 15px; + padding-bottom: 15px; +} +.section-title h2::after { + position: absolute; + content: ""; + background: linear-gradient(to right, #5ca9fb 0%, #6372ff 100%); + height: 4px; + width: 60px; + bottom: 0; + margin-left: -30px; + left: 50%; +} +.section-title p { + font-size: 18px; +} +.btn-custom { + font-family: "Raleway", sans-serif; + text-transform: uppercase; + /*color: #fff;*/ + color: black; + background-color: #5ca9fb; + background-image: linear-gradient(to right, #5ca9fb 0%, #6372ff 100%); + padding: 14px 34px; + letter-spacing: 1px; + margin: 0; + font-size: 15px; + font-weight: 650; + border-radius: 25px; + transition: all 0.5s linear; + border: 0; +} +.btn-custom:hover, +.btn-custom:focus, +.btn-custom.focus, +.btn-custom:active, +.btn-custom.active { + color: #fff; + background-image: none; + 
background-color: #6372ff; +} +.btn:active, +.btn.active { + background-image: none; + outline: 0; + -webkit-box-shadow: none; + box-shadow: none; +} +a:focus, +.btn:focus, +.btn:active:focus, +.btn.active:focus, +.btn.focus, +.btn:active.focus, +.btn.active.focus { + outline: none; + outline-offset: none; +} +/* Header Section */ +.intro { + display: table; + width: 100%; + padding: 0; + background: url(../img/intro-bg.jpg) center center no-repeat; + background-color: #e5e5e5; + -webkit-background-size: cover; + -moz-background-size: cover; + background-size: cover; + -o-background-size: cover; +} +.intro .overlay { + background: rgba(0, 0, 0, 0.2); +} +.intro h1 { + font-family: "Raleway", sans-serif; + color: #fff; + font-size: 82px; + font-weight: 700; + text-transform: uppercase; + margin-top: 0; + margin-bottom: 10px; +} +.intro h1 span { + font-weight: 800; + color: #5ca9fb; +} +.intro p { + color: #fff; + font-size: 22px; + font-weight: 300; + line-height: 30px; + margin: 0 auto; + margin-bottom: 60px; +} +header .intro-text { + padding-top: 200px; + padding-bottom: 200px; + text-align: center; +} +/* Features Section */ +#features { + background: #f6f6f6; + padding-top: 30px; + padding-bottom: 30px; +} + +#features i.fa { + font-size: 38px; + margin-bottom: 20px; + transition: all 0.5s; + color: #fff; + width: 100px; + height: 100px; + padding: 30px 0; + border-radius: 50%; + background: linear-gradient(to right, #6372ff 0%, #5ca9fb 100%); + box-shadow: 10px 10px 10px rgba(0, 0, 0, 0.05); +} +/* About Section */ +#about { + padding: 100px 0; +} +#about h3 { + font-size: 22px; + margin: 0 0 20px; +} +#about h2 { + position: relative; + margin-bottom: 15px; + padding-bottom: 15px; +} +#about h2::after { + position: absolute; + content: ""; + background: linear-gradient(to right, #5ca9fb 0%, #6372ff 100%); + height: 4px; + width: 60px; + bottom: 0; + left: 0; +} +#about .about-text li { + margin-bottom: 6px; + margin-left: 6px; + list-style: none; + padding: 
0; +} +#about .about-text li:before { + content: "\f00c"; + font-family: "FontAwesome"; + color: #5ca9fb; + font-size: 11px; + font-weight: 300; + padding-right: 8px; +} +#about img { + width: 520px; + margin-top: 10px; + background: #fff; + border-right: 0; + box-shadow: 0 0 50px rgba(0, 0, 0, 0.06); +} +#about p { + line-height: 24px; + margin: 30px 0; +} +/* Contribute Section */ +#contribute { + padding: 100px 0; + background: linear-gradient(to right, #6372ff 0%, #5ca9fb 100%); + color: #fff; +} +#contribute .service-desc { + margin: 10px 10px 20px; +} +#contribute h2 { + color: #fff; +} +#contribute .section-title h2::after { + position: absolute; + content: ""; + background: rgba(255, 255, 255, 0.3); + height: 4px; + width: 60px; + bottom: 0; + margin-left: -30px; + left: 50%; +} +#contribute i.fa { + font-size: 42px; + width: 120px; + height: 120px; + padding: 40px 0; + background: linear-gradient(to right, #6372ff 0%, #5ca9fb 100%); + border-radius: 50%; + color: #fff; + box-shadow: 10px 10px 10px rgba(0, 0, 0, 0.05); +} +#contribute h3 { + font-weight: 500; + padding: 5px 0; + color: #fff; +} +#contribute p { + color: rgba(255, 255, 255, 0.75); +} +#contribute .service-desc { + margin-bottom: 40px; +} +/* Getting Started Section */ +#getting_started { + padding: 40px 0; +} +.getting_started-item { + margin: 1px -15px 0 -14px; + padding: 0; +} +.getting_started-item .hover-bg { + overflow: hidden; + position: relative; + margin: 0; +} +.hover-bg .hover-text { + position: absolute; + text-align: center; + margin: 0 auto; + color: #fff; + background: linear-gradient( + to right, + rgba(99, 114, 255, 0.8) 0%, + rgba(92, 169, 251, 0.8) 100% + ); + padding: 30% 0 0; + height: 100%; + width: 100%; + opacity: 0; + transition: all 0.5s; +} +.hover-bg .hover-text > h4 { + opacity: 0; + color: #fff; + -webkit-transform: translateY(100%); + transform: translateY(100%); + transition: all 0.3s; + font-size: 18px; + letter-spacing: 1px; + font-weight: 500; + 
text-transform: uppercase; +} +.hover-bg:hover .hover-text > h4 { + opacity: 1; + -webkit-backface-visibility: hidden; + -webkit-transform: translateY(0); + transform: translateY(0); +} +.hover-bg:hover .hover-text { + opacity: 1; +} +/* Testimonials Section */ +#testimonials { + padding: 100px 0; + background: #f6f6f6; +} +#testimonials i { + color: #e6e6e6; + font-size: 32px; + margin-bottom: 20px; +} +.testimonial { + position: relative; + padding: 20px; +} +.testimonial-image { + float: left; + margin-right: 15px; +} +.testimonial-image, +.testimonial-image img { + display: block; + width: 64px; + height: 64px; + border-radius: 50%; +} +.testimonial-content { + position: relative; + overflow: hidden; +} +.testimonial-content p { + margin-bottom: 0; + font-size: 14px; + font-style: italic; +} +.testimonial-meta { + margin-top: 10px; + font-size: 15px; + font-weight: 600; + color: #666; +} +/* Team Section */ +#team { + padding: 100px 0; +} +#team h4 { + margin: 5px 0; +} +#team .team-img { + width: 240px; +} +#team .thumbnail { + background: transparent; + border: 0; +} +#team .thumbnail .caption { + padding: 10px 0 0; + color: #888; +} +/* Contact Section */ +#contact { + padding: 100px 0 60px; + background: linear-gradient(to right, #6372ff 0%, #5ca9fb 100%); + color: rgba(255, 255, 255, 0.75); +} +#contact .section-title { + margin-bottom: 40px; +} +#contact .section-title p { + font-size: 16px; +} +#contact h2 { + color: #fff; + margin-top: 10px; + margin-bottom: 15px; + padding-bottom: 15px; +} +#contact .section-title h2::after { + position: absolute; + content: ""; + background: rgba(255, 255, 255, 0.3); + height: 4px; + width: 60px; + bottom: 0; + left: 30px; +} +#contact h3 { + color: #fff; + margin-top: 80px; + margin-bottom: 25px; + padding-bottom: 20px; + font-weight: 400; +} +#contact form { + padding-top: 20px; +} +#contact .text-danger { + color: #cc0033; + text-align: left; +} +#contact .btn-custom { + margin: 30px 0; + background: transparent; + 
border: 2px solid #fff; +} +#contact .btn-custom:hover { + color: #1f386e; + background: #fff; +} +label { + font-size: 12px; + font-weight: 400; + font-family: "Open Sans", sans-serif; + float: left; +} +#contact .form-control { + display: block; + width: 100%; + padding: 6px 12px; + font-size: 16px; + line-height: 1.42857143; + color: #444; + background-color: #fff; + background-image: none; + border: 1px solid #ddd; + border-radius: 0; + -webkit-box-shadow: none; + box-shadow: none; + -webkit-transition: none; + -o-transition: none; + transition: none; +} +#contact .form-control:focus { + border-color: #999; + outline: 0; + -webkit-box-shadow: transparent; + box-shadow: transparent; +} +.form-control::-webkit-input-placeholder { + color: #777; +} +.form-control:-moz-placeholder { + color: #777; +} +.form-control::-moz-placeholder { + color: #777; +} +.form-control:-ms-input-placeholder { + color: #777; +} +#contact .contact-item { + margin: 20px 0; +} +#contact .contact-item span { + color: rgba(255, 255, 255, 1); + margin-bottom: 10px; + display: block; +} +#contact .contact-item i.fa { + margin-right: 10px; +} +#contact .social { + border-top: 1px solid rgba(255, 255, 255, 0.15); + padding-top: 50px; + margin-top: 50px; + text-align: center; +} +#contact .social ul li { + display: inline-block; + margin: 0 20px; +} +#contact .social i.fa { + font-size: 22px; + width: 48px; + height: 48px; + padding: 12px 0; + border: 2px solid #fff; + color: #fff; + border-radius: 50%; + transition: all 0.3s; +} +#contact .social i.fa:hover { + color: #608dfd; + background: #fff; +} +/* Footer Section*/ +#footer { + background: #f6f6f6; + padding: 30px 0; +} +#footer p { + color: #888; + font-size: 14px; +} +#footer a { + color: #608dfd; +} +#footer a:hover { + border-bottom: 2px solid #608dfd; +} + +.logo { + width: auto; /* Adjust width automatically */ + height: 50px; /* Inherit the height from the container */ + object-fit: contain; /* Maintain the aspect ratio of the 
image */ + transform: translateY(-12px); + border-radius: 10px; +} + +@media (max-width: 768px) { + #about img { + margin: 50px 0; + } +} diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..ab0bb1b --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = built_with_sphinx + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/built_with_sphinx/doctrees/citation/index.doctree b/docs/built_with_sphinx/doctrees/citation/index.doctree new file mode 100644 index 0000000..2a51cdc Binary files /dev/null and b/docs/built_with_sphinx/doctrees/citation/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/automating_documentation_flow.doctree b/docs/built_with_sphinx/doctrees/contributing_info/automating_documentation_flow.doctree new file mode 100644 index 0000000..3f70ed4 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/automating_documentation_flow.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/coding_standards.doctree b/docs/built_with_sphinx/doctrees/contributing_info/coding_standards.doctree new file mode 100644 index 0000000..ccf06b0 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/coding_standards.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/contribute_index.doctree 
b/docs/built_with_sphinx/doctrees/contributing_info/contribute_index.doctree new file mode 100644 index 0000000..2d84ae9 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/contribute_index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_FlowVerse.doctree b/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_FlowVerse.doctree new file mode 100644 index 0000000..1ad55d3 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_FlowVerse.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_aiFlows.doctree b/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_aiFlows.doctree new file mode 100644 index 0000000..a37cdb2 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/contributing_to_aiFlows.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/finding_collaborators.doctree b/docs/built_with_sphinx/doctrees/contributing_info/finding_collaborators.doctree new file mode 100644 index 0000000..cfa8fa8 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/finding_collaborators.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/index.doctree b/docs/built_with_sphinx/doctrees/contributing_info/index.doctree new file mode 100644 index 0000000..c07dc70 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/license_info.doctree b/docs/built_with_sphinx/doctrees/contributing_info/license_info.doctree new file mode 100644 index 0000000..a3cfae6 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/license_info.doctree differ diff --git a/docs/built_with_sphinx/doctrees/contributing_info/recognition_info.doctree 
b/docs/built_with_sphinx/doctrees/contributing_info/recognition_info.doctree new file mode 100644 index 0000000..87a9da9 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/contributing_info/recognition_info.doctree differ diff --git a/docs/built_with_sphinx/doctrees/environment.pickle b/docs/built_with_sphinx/doctrees/environment.pickle new file mode 100644 index 0000000..6de1d68 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/environment.pickle differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Quick_Start/quick_start.doctree b/docs/built_with_sphinx/doctrees/getting_started/Quick_Start/quick_start.doctree new file mode 100644 index 0000000..ca55e9d Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Quick_Start/quick_start.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/atomic_flow.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/atomic_flow.doctree new file mode 100644 index 0000000..fd006ba Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/atomic_flow.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/autogpt_tutorial.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/autogpt_tutorial.doctree new file mode 100644 index 0000000..ae2b419 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/autogpt_tutorial.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/composite_flow.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/composite_flow.doctree new file mode 100644 index 0000000..c995fa5 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/composite_flow.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.doctree 
b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.doctree new file mode 100644 index 0000000..68f65ce Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reAct.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reAct.doctree new file mode 100644 index 0000000..eb7cc23 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reAct.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reActwHumanFeedback.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reActwHumanFeedback.doctree new file mode 100644 index 0000000..1bfb0fc Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/reActwHumanFeedback.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/setting_up_aiFlows.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/setting_up_aiFlows.doctree new file mode 100644 index 0000000..cd8721b Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/setting_up_aiFlows.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/Tutorial/tutorial_landing_page.doctree b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/tutorial_landing_page.doctree new file mode 100644 index 0000000..7ac3c1f Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/Tutorial/tutorial_landing_page.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/autogpt.doctree b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/autogpt.doctree new file mode 100644 index 0000000..9b36f98 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/autogpt.doctree differ diff --git 
a/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/chat_flow.doctree b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/chat_flow.doctree new file mode 100644 index 0000000..621624c Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/chat_flow.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/detailed_example_landing_page.doctree b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/detailed_example_landing_page.doctree new file mode 100644 index 0000000..43b69af Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/detailed_example_landing_page.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/vision_flow.doctree b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/vision_flow.doctree new file mode 100644 index 0000000..e0a97f2 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/detailed_examples/vision_flow.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/developer_guide/developper_guide_landing_page.doctree b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/developper_guide_landing_page.doctree new file mode 100644 index 0000000..19e2adc Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/developper_guide_landing_page.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/developer_guide/flow_module_management.doctree b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/flow_module_management.doctree new file mode 100644 index 0000000..390da57 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/flow_module_management.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/developer_guide/typical_developer_workflows.doctree 
b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/typical_developer_workflows.doctree new file mode 100644 index 0000000..b0c0c7a Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/developer_guide/typical_developer_workflows.doctree differ diff --git a/docs/built_with_sphinx/doctrees/getting_started/index.doctree b/docs/built_with_sphinx/doctrees/getting_started/index.doctree new file mode 100644 index 0000000..a713688 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/getting_started/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/index.doctree b/docs/built_with_sphinx/doctrees/index.doctree new file mode 100644 index 0000000..c6c73dd Binary files /dev/null and b/docs/built_with_sphinx/doctrees/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/installation/index.doctree b/docs/built_with_sphinx/doctrees/installation/index.doctree new file mode 100644 index 0000000..fb00071 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/installation/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/introduction/index.doctree b/docs/built_with_sphinx/doctrees/introduction/index.doctree new file mode 100644 index 0000000..d1e9f57 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/introduction/index.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.backends.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.backends.doctree new file mode 100644 index 0000000..bd3cdc1 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.backends.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.base_flows.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.base_flows.doctree new file mode 100644 index 0000000..016bfc7 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.base_flows.doctree differ diff --git 
a/docs/built_with_sphinx/doctrees/source/aiflows.data_transformations.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.data_transformations.doctree new file mode 100644 index 0000000..5ac0574 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.data_transformations.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.datasets.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.datasets.doctree new file mode 100644 index 0000000..1e2dd80 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.datasets.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.doctree new file mode 100644 index 0000000..40dc3f8 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.flow_cache.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.flow_cache.doctree new file mode 100644 index 0000000..4d4b019 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.flow_cache.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.flow_launchers.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.flow_launchers.doctree new file mode 100644 index 0000000..b410b55 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.flow_launchers.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.flow_verse.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.flow_verse.doctree new file mode 100644 index 0000000..948e20d Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.flow_verse.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.history.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.history.doctree new file mode 100644 index 0000000..e8a8c68 Binary files /dev/null and 
b/docs/built_with_sphinx/doctrees/source/aiflows.history.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.interfaces.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.interfaces.doctree new file mode 100644 index 0000000..434041a Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.interfaces.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.messages.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.messages.doctree new file mode 100644 index 0000000..47cf4fe Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.messages.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.prompt_template.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.prompt_template.doctree new file mode 100644 index 0000000..d175775 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.prompt_template.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/aiflows.utils.doctree b/docs/built_with_sphinx/doctrees/source/aiflows.utils.doctree new file mode 100644 index 0000000..e3b6a78 Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/aiflows.utils.doctree differ diff --git a/docs/built_with_sphinx/doctrees/source/modules.doctree b/docs/built_with_sphinx/doctrees/source/modules.doctree new file mode 100644 index 0000000..4ebfb1c Binary files /dev/null and b/docs/built_with_sphinx/doctrees/source/modules.doctree differ diff --git a/docs/built_with_sphinx/html/.buildinfo b/docs/built_with_sphinx/html/.buildinfo new file mode 100644 index 0000000..be9c379 --- /dev/null +++ b/docs/built_with_sphinx/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
+config: e09a7edb92542f2c8272758ff2c12d48 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/built_with_sphinx/html/_images/fig1_rounded_corners.png b/docs/built_with_sphinx/html/_images/fig1_rounded_corners.png new file mode 100644 index 0000000..177c3e3 Binary files /dev/null and b/docs/built_with_sphinx/html/_images/fig1_rounded_corners.png differ diff --git a/docs/built_with_sphinx/html/_images/logo_text_statement_alt_rounded_corners.png b/docs/built_with_sphinx/html/_images/logo_text_statement_alt_rounded_corners.png new file mode 100644 index 0000000..ef0009c Binary files /dev/null and b/docs/built_with_sphinx/html/_images/logo_text_statement_alt_rounded_corners.png differ diff --git a/docs/built_with_sphinx/html/_images/previous_flows_rounded.png b/docs/built_with_sphinx/html/_images/previous_flows_rounded.png new file mode 100644 index 0000000..a9db02c Binary files /dev/null and b/docs/built_with_sphinx/html/_images/previous_flows_rounded.png differ diff --git a/docs/built_with_sphinx/html/_sources/citation/index.md.txt b/docs/built_with_sphinx/html/_sources/citation/index.md.txt new file mode 100644 index 0000000..7eea4a1 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/citation/index.md.txt @@ -0,0 +1,15 @@ +## Citation + + +To reference the 🤖🌊 **aiFlows** library, for now, please cite the paper [Flows: Building Blocks of Reasoning and Collaborating AI](https://arxiv.org/pdf/2308.01285.pdf): + +``` +@misc{josifoski2023flows, + title={Flows: Building Blocks of Reasoning and Collaborating AI}, + author={Martin Josifoski and Lars Klein and Maxime Peyrard and Yifei Li and Saibo Geng and Julian Paul Schnitzler and Yuxing Yao and Jiheng Wei and Debjit Paul and Robert West}, + year={2023}, + eprint={2308.01285}, + archivePrefix={arXiv}, + primaryClass={cs.AI} +} +``` \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/automating_documentation_flow.rst.txt 
b/docs/built_with_sphinx/html/_sources/contributing_info/automating_documentation_flow.rst.txt new file mode 100644 index 0000000..6f8167f --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/automating_documentation_flow.rst.txt @@ -0,0 +1,49 @@ +.. _automating_doc: + +=========================================================== +Automating the documentation of a Flow on the FlowVerse +=========================================================== + +Documenting your Flow is a crucial step in ensuring clarity and accessibility. Let's explore an efficient way to automate this process using pydoc-markdown. + +**1. Document Your Flow in Sphinx Format** +------------------------------------------- +Start by documenting your Flow in `Sphinx format`_. Need a reference? Check out `ChatFlowModule`_ for inspiration. + + Pro tip: Leverage VSCode's GitHub Copilot to expedite the documentation process. + +**2. Install pydoc-markdown** +------------------------------- +Ensure you have the necessary tool installed by running the following command:: + + pip install pydoc-markdown + + +**3. Navigate to Your Flow Directory** +------------------------------------------ +Go to the directory containing your Flow file:: + + cd + + +**4. Build the Markdown** +------------------------------------------ +Generate the Markdown documentation using pydoc-markdown. Replace with the name of your Flow file (excluding the `.py` extension). +For example, if your Flow file is named `Flow1.py`, execute the following command:: + + + pydoc-markdown -p Flow1 --render-toc > README.md + + +If you have multiple Flow files, consider using the following command to include all files in the documentation:: + + + pydoc-markdown -I . --render-toc > README.md + + +------ + +This process automates the generation of Markdown documentation for your Flow, streamlining the contribution process on the FlowVerse. Happy documenting! 🚀✨ + +.. 
_Sphinx format: https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html +.. _ChatFlowModule: https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.py diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/coding_standards.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/coding_standards.rst.txt new file mode 100644 index 0000000..33fd02a --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/coding_standards.rst.txt @@ -0,0 +1,69 @@ +.. _coding_standards: + +Coding Standards +================ + +When contributing to aiFlows library, it's essential to adhere to the following coding standards to maintain consistency, readability, and the overall quality of the codebase: + +1. Simplicity and Readability +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Strive to make your code as simple and readable as possible. Use clear and meaningful variable/function names, and avoid unnecessary complexity. + +2. Best Practices +^^^^^^^^^^^^^^^^^^^^^^ + +Follow industry best practices when implementing features or fixing bugs. This includes adhering to language-specific conventions and guidelines. + +3. Documentation +^^^^^^^^^^^^^^^^^^^^^^^^ + +Document your code thoroughly. Provide comments where necessary to explain complex logic or algorithms. Use clear and concise language to describe your thought process. + +4. Docstrings in Sphinx Format +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For all new functions and classes, include docstrings in Sphinx format. These docstrings should describe the purpose, parameters, return values, and possibly exceptions raised by the function or class. Here is an example of the docstring of a function in the Sphinx format:: + + def example_function(param1, param2): + """ + Brief description of the function. + + :param param1: Description of the first parameter. + :param param2: Description of the second parameter. + :return: Description of the return value. 
+ :raises CustomException: Description of when this exception is raised. + """ + # Function implementation + return result + +For more details on the Sphinx docstring format check out this link: `Sphinx Docstring Format`_. + +5. Backward Compatibility +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Ensure that your code changes are backward compatible whenever possible. This helps maintain the stability of the library for existing users. + +6. Thorough Testing +^^^^^^^^^^^^^^^^^^^^ + +Create comprehensive tests for your code. Tests should cover various scenarios, including edge cases, to ensure the robustness of your implementation. + +7. Test Coverage +^^^^^^^^^^^^^^^^ + +Try to maintain or increase test coverage when adding new features or modifying existing ones when needed. Aim for a high percentage of code coverage to catch potential issues early. + +8. Feature Tests +^^^^^^^^^^^^^^^^ + +When introducing new features, include corresponding tests. Every feature should have a test, and existing tests should be updated as needed. + + +--------------- + +Your dedication to simplicity, readability, and best practices is greatly appreciated. Your contributions help make the aiFlows library more accessible, robust, and user-friendly for the entire community. + +Once again, thank you for being a valued member of our community and for your commitment to making aiFlows even better. Happy coding! 🚀⭐ + + +.. _Sphinx Docstring Format: https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/contribute_index.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/contribute_index.rst.txt new file mode 100644 index 0000000..66a3c5a --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/contribute_index.rst.txt @@ -0,0 +1,81 @@ +.. 
_contributing_index: + +Contribution Guide +========================================= + +This guide provides information on how to contribute to the aiFlows. Whether you're interested in coding, documentation, collaboration, fixing bugs or adding features you'll find useful resources here. +If you know what you're looking for, use the table of contents to jump right in. If you're new to aiFlows, start with the **Preface** section below. + +Table of Contents +----------------- +.. toctree:: + :titlesonly: + :glob: + + * + +**Preface** +---------------------------------------------- + Our goal is to make Flows a community-driven project that will benefit researchers and developers alike (see the `Why should I use aiFlows?`_ ) and to achieve this goal, we need your help. + + + You can become a part of the project in a few ways: + + - contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it + - contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others' work + - use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions, as well as important missing features, is by experimenting + - last but not least, star the repository and shout out aiFlows with your friends and colleagues; spread the word with love + + .. _ + + We will support the community in the best way we can but also lead by example. In the coming weeks, we will share: + + - a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. -- feedback and help would be greatly appreciated!) 
+ - write-ups outlining features, ideas, and our long-term vision for Flows -- we encourage you to pick up any of these and start working on them in whatever way you see fit + - a version of JARVIS -- your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us, and JARVIS will become one of those useful projects that will constantly push the boundaries of what's possible with Flows + + .. _ + + We have tried to find a way for anyone to benefit by contributing to the project. Below we describe the envisioned workflows in more detail (we would love to hear your feedback on this -- the Discord `server `_ already has a channel for it :)). + + In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let's work on a great (open-source) AI future together! + + + + +**Want To Contribute to aiFlows?** +---------------------------------------------- + +Connecting With Like-Minded Contributors & How To Get Help ? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Explore the :ref:`finding_collaborators` section for resources, tips, and guidance on connecting with potential collaborators, sharing project ideas, building your dream team or how to get help. 🚀🌟 + + +Contributing To aiFlows Library: Bug Fixes and Feature Additions Guide +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Contribute to the aiFlows Library! Follow the guidelines in the :ref:`contributing_to_ai_flows` guide for bug fixes and feature additions. + Report issues on GitHub, discuss on Discord, and create pull requests. Your contributions matter! 🚀🌟 + + +Contributing To the FlowVerse: Creating New Flows and Contributing To Existing Flows Guide +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Contribute to the Flows from the FlowVerse! 
Follow the guidelines in the :ref:`contributing_to_FlowVerse` guide to understand how to create and publish your Flow or contribute to an existing one. 🚀 + +Automating the Generation of FlowCards (README) for the FlowVerse +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Check out the :ref:`automating_doc` guide to speed up the process of creating FlowCards (READMEs) for the FlowVerse. + +Coding Standards for aiFlows +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Review the coding standards for aiFlows Library contributions in the :ref:`coding_standards` guide. Essential guidelines ensuring a high-quality codebase. + Familiarize yourself with these standards before submitting your Pull Request. 🚀⭐ + +Contributors Wall and Sharing/Publicizing Your aiFlows Project or Flow +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Learn about contributor recognition, sharing work on Discord, and the importance of acknowledgment for aiFlows library contributions in the :ref:`recognition_info` guide. Happy contributing! 🚀🌐 + +Licence Information (Spoiler Alert: It’s Open-Source and Completely Free!) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Check out :ref:`license_info` to get quick answers about aiFlows' open-source MIT License, its free-of-charge accessibility, and you can use in commercial projects. Explore how you can contribute to the thriving aiFlows community without any worries about the legal stuff. 🚀🌟 + +.. _Why Should I Use aiFlows?: ../introduction/index.html#why-should-i-use-aiflows diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_FlowVerse.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_FlowVerse.rst.txt new file mode 100644 index 0000000..463a181 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_FlowVerse.rst.txt @@ -0,0 +1,62 @@ +.. 
_contributing_to_flowVerse: + +Recommended Workflow for Contributing to a Flow on the FlowVerse +================================================================ + +**1. Check Existing Flows & Talk to the Community** +--------------------------------------------------- + +Before initiating a new Flow, take a moment to explore whether a similar Flow already exists. Delve into our vibrant community on 🤲│flow-sharing
in Discord to check for existing Flows or reach out to the community on 🌊🔮│flow-verse. + +If the desired Flow doesn't exist, consider crafting a new post in our Discord's 🤲│flow-sharing. Share detailed information about the Flow you aim to implement +and let the community know about your initiative. 🤲│flow-sharing serves as an excellent platform to engage in discussions, seek feedback, receive assistance, and showcase your Flow. +Utilize this space to not only introduce your idea but also to foster collaboration, gather insights, and promote your Flow within the community. + +**2. Developing Your Flow - Creating or Enhancing for Contribution** +-------------------------------------------------------------------- + +Whether you're cultivating a new Flow or contributing to an existing one, this step is your guide to navigate the intricate pathways of the FlowVerse. +Consider checking out the :ref:`typical_developper_workflows` tutorial for examples on how to work on a new or existing Flow from the FlowVerse. + +**2.1. Create an Organized Workspace** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Optimize your workflow by following our recommendation to establish a centralized workspace. Create a dedicated folder to house all the flows you plan to interact with in the FlowVerse. +Your structure should look something like this: + +.. code-block:: bash + + ├── workspace-using-your-flows + └── flow_modules + ├── Flow1 + ├── Flow2 + ├── ...
+ +**2.2 Leverage the Flow Template** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Consider adopting our `Flow template `_ as a solid foundation for your project. This recommended structure serves as a guide, +enhancing consistency and facilitating a smoother collaborative experience. Also, check the `ChatFlowModule `_ for an example. + +**2.3. Code With Precision** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Embrace our recommended coding standards, which serve as the backbone of a robust project. Strive for code that is not only creative but +also aligns with our :ref:`coding_standards`. This commitment ensures readability, maintainability, and alignment with the broader coding community. + +**2.5. Consider Automation for Documentation (Optional)** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For those inclined towards efficiency, exploring automation for documentation may prove beneficial. Refer to `this tutorial <./automating_documentation_flow.md>`_ for a comprehensive guide on +automating the documentation process—a strategic move for systematic project management. + +**3. Engage in Dialogue on Discord** +------------------------------------- + +Engage in meaningful discussions within the `Discord community `_. Sharing your progress, seeking advice, and actively participating in conversations +not only enhances your project but also contributes to the collaborative ethos of the community. + +--- + +Remember, each contribution, no matter how small, adds to the vibrant tapestry of the FlowVerse. Happy coding! 🚀✨ diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_aiFlows.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_aiFlows.rst.txt new file mode 100644 index 0000000..1fd8b17 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/contributing_to_aiFlows.rst.txt @@ -0,0 +1,116 @@ +.. 
_contributing_to_ai_flows: + +Contributing to aiFlows Library (for bug fixes and adding features) +====================================================================== + +**Step 1: Identifying and Reporting an Issue / Bug** +------------------------------------------------------- + +**1.1. Check Existing Issues & Talk to the Community** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Before creating a new issue, check if the problem you've encountered already exists. If it does, consider commenting on the existing issue to +provide additional details or express your interest in working on it. + +Community Discussion on Discord: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Additionally, for more immediate interaction and collaboration, you can discuss the issue on the project's `Discord`_ channel. +Join the 💻│developers or 🐛│debugging channels to connect with the community, seek advice, and coordinate efforts. Engaging with the +community on Discord can provide valuable insights and assistance throughout the issue resolution process. + +**1.2. Creating a New Issue** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If the issue doesn't exist, create a new one. Include a clear and concise title, detailed description of the problem, and steps to reproduce it. +Utilize the "Report a Bug" template for bug reports and the "Feature Request" template for suggesting new features. + +**Step 2: Getting Started with a Pull Request (PR)** +---------------------------------------------------------- + +**2.0. Inform the Community** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Comment on the issue you're working on, informing others that you're actively working on a solution. +Provide progress updates if needed. Also, inform the community on our `Discord`_ 🔨│community-projects forum that you're working on it. +Engage with the community, share your ideas, and seek feedback on your pull request. 
This open communication is crucial not only for +collaboration but also to inform others that you're actively working on the issue. This helps prevent duplicate work and ensures that community members are aware of ongoing efforts, +fostering a collaborative and efficient development environment. + +**2.1. Fork the Repository** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On the "aiflows" GitHub page, click "Fork" to create a copy of the repository under your GitHub account. + +**2.2. Clone Your Fork** +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Clone the forked repository to your local machine using the following command:: + + git clone https://github.com/your-username/aiflows.git + +**2.3. Create a New Branch** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Create a new branch for your fix or feature:: + + git checkout -b fix-branch + +**Step 3: Coding and Making a Pull Request** +-------------------------------------------- + +**3.1 Make Changes & And adhere to aiFlow's coding practices** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Implement your fix or feature. Follow best practices, and consider the project's :ref:`coding_standards`. + +**3.2. Commit Changes** +^^^^^^^^^^^^^^^^^^^^^^^ + +Commit your changes with clear and descriptive messages:: + + git add . + git commit -m "Fix: Describe the issue or feature" + +**3.3. Push Changes** +^^^^^^^^^^^^^^^^^^^^^^ + +Push your changes to your forked repository:: + + git push origin fix-branch + +**3.4. Create a Pull Request** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On the GitHub page of your fork, create a new pull request. Ensure you select the appropriate branch in the "base" and "compare" dropdowns. +Make sure to check out this Github tutorial for more details: `Creating a pull request from a fork`_. + +**3.5. Link the pull request to an issue** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In the description or comments of your pull request, reference the issue it addresses. 
Use the keyword "fixes" followed by the issue number (e.g., "fixes #123"). +This helps in automatically closing the related issue when the pull request is merged. +Check out this Github tutorial for more details: `Linking a pull request to an issue`_. + +**Step 4: Addressing Reviewer Concerns** +----------------------------------------- + +**4.1. Reviewer Feedback** +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Reviewers may suggest changes to your code. Be open to feedback and make necessary adjustments. + +**4.2. Coding Style** +^^^^^^^^^^^^^^^^^^^^^^ + +Ensure your code aligns with the project's coding style. If unsure, refer to the project's documentation or ask for clarification. + +--------------- + +Thank you for considering contributing to the aiFlows library! Your dedication and effort are immensely appreciated. +Contributors like you make a significant impact, and we want to express our gratitude. +Remember, your name will proudly appear on our contributors' wall, showcasing your valuable contributions to the aiFlows project 🚀🔥 + +.. _Creating a pull request from a fork: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork +.. _Linking a pull request to an issue: https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue +.. _Discord: https://discord.gg/yFZkpD2HAh \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/finding_collaborators.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/finding_collaborators.rst.txt new file mode 100644 index 0000000..eac9614 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/finding_collaborators.rst.txt @@ -0,0 +1,38 @@ +.. _finding_collaborators: + +================================ +Looking for Collaborators ? +================================ + +🤝 Seeking Collaborators? 
If you're on the lookout for a collaborator to tackle an issue or work on a feature, head over to the `👥│flows-friends`_ forum on Discord. +Share your project ideas, highlight your skills, or specify areas where you could use assistance. For more targeted searches, consider posting in specialized channels, +such as the `🔬│research`_ channel if you're seeking a researcher. Your dream team may just be a click away. Let the collaboration begin! 🚀 + +Looking for Collaborators - FAQ +------------------------------- + +**1. I’m Encountering Issues With Debugging. How Can the Community Help?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 🕵️ If you're in need of debugging support, head over to the `🐛│debugging`_ channel on Discord. + Engaging with the community there can provide valuable insights and assistance in resolving your issues. + +**2. Where Can I Get Feedback on My Work?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 📣 For feedback on your work, visit the appropriate `Discord`_ channel based on your project or focus. + For FlowVerse-related projects, check out channels like Flow-sharing or Flow-verse. If you're involved in research, head to the `🔬│research`_ channel. General + development queries can be directed to the developers channel. Community-projects are also a great space for feedback. + +**3. I’m Looking To Brainstorm Ideas. Where Can I Discuss Them With the Community?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 💡 For brainstorming sessions, consider discussing your ideas in channels like Flows Ideas, Flow-verse, or Developers on `Discord`_. + Engaging with the community in these spaces can lead to fruitful discussions and valuable input on your concepts. + +**4. I Don’t Have the Bandwidth/Time To Work on a Project Related to aiFlows and Would Like To Find Somebody To Collaborate With. 
What Should I Do?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 🤝 If you're seeking collaborators due to time constraints, head to the `👥│flows-friends`_ channel on `Discord`_. Share your project ideas, skills, + and areas where you need assistance. You might find the perfect collaborator who shares your passion and can contribute to your project. + +.. _👥│flows-friends: https://discord.gg/yFZkpD2HAh +.. _🔬│research: https://discord.gg/yFZkpD2HAh +.. _🐛│debugging: https://discord.gg/yFZkpD2HAh +.. _Discord: https://discord.gg/yFZkpD2HAh diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/index.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/index.rst.txt new file mode 100644 index 0000000..260529e --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/index.rst.txt @@ -0,0 +1,31 @@ + +Contribute +========== + + + +Our mission is to make this a community-driven project that will benefit researchers and developers alike +(see the `Why should I use aiFlows?`_ ) and to achieve this goal, we need your help. + +You can become a part of the project in a few ways: + +- contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it +- contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others' work +- use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions, +as well as important missing features, is by experimenting +- last but not least, ⭐ the repository and 📣 share aiFlows with your friends and colleagues; spread the word ❤️ + +We will support the community in the best way we can but also lead by example. 
In the coming weeks, we will share: + +- a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. -- feedback and help would be greatly appreciated!) +- write-ups outlining features, ideas, and our long-term vision for Flows -- we encourage you to pick up any of these and start working on them in whatever way you see fit +- a version of JARVIS -- your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us, +and JARVIS will become one of those useful projects that will constantly push the boundaries of what's possible with Flows + +We have tried to find a way for anyone to benefit by contributing to the project. The :ref:`contributing_index` guide describes our envisioned workflow and how you could get +involved in more detail (we would love to hear your feedback on it -- the Discord server already has a channel for it :). + +In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let's work on a great (open-source) AI future together! + + +.. _Why Should I Use aiFlows?: ../introduction/index.html diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/license_info.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/license_info.rst.txt new file mode 100644 index 0000000..7947df0 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/license_info.rst.txt @@ -0,0 +1,40 @@ +.. _license_info: + +Licence Info: Frequently Asked Questions +========================================= + +1. I’m Worried About License Issues. Is aiFlows Open-Source? +------------------------------------------------------------- + +Absolutely! aiFlows is proudly open-source, and it operates under the MIT License.
+ +**MIT License:** The MIT License is a permissive open-source license that grants you the freedom to use, modify, and distribute aiFlows without any restrictions. +It encourages collaboration and community contribution. + +2. Is There Any Catch? Do I Have To Pay Anything? +-------------------------------------------------- + +Not at all! aiFlows is free to use, and there's no need to worry about hidden fees. +It's a library designed to make development, research, and the creation of structured interactions seamless and accessible. + +3. Can I Use aiFlows in Commercial Projects? +---------------------------------------------- + +Yes, you can! The MIT License allows you to use aiFlows in both open-source and commercial projects. +Feel free to incorporate aiFlows into your endeavors, whether they are for research, development, or commercial applications. + +4. Are There Any Restrictions on How I Can Use aiFlows? +-------------------------------------------------------- + +Nope! The MIT License provides you with considerable freedom. You can use aiFlows in any way you see fit, modify it according to your needs, +and integrate it into your projects without worrying about restrictive conditions. + +5. How Can I Contribute to aiFlows? +------------------------------------ + +Contributions are highly welcome! Whether it's bug fixes, new features, or improvements, the community thrives on collaboration. Head over to the Contribution Guidelines to +understand how you can actively participate in making aiFlows even better. + +------ + +Remember, aiFlows is here to empower your projects and initiatives without any catches. Your contributions and engagement with the community are what make aiFlows flourish. Happy coding! 
🚀✨ diff --git a/docs/built_with_sphinx/html/_sources/contributing_info/recognition_info.rst.txt b/docs/built_with_sphinx/html/_sources/contributing_info/recognition_info.rst.txt new file mode 100644 index 0000000..7243ef4 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/contributing_info/recognition_info.rst.txt @@ -0,0 +1,29 @@ +.. _recognition_info: + +Publicizing Your Work +===================== + +1. Do Contributors to aiFlows’ Codebase Appear on the Contributors Wall in the Library’s Next Release? +------------------------------------------------------------------------------------------------------ + +Absolutely! Contributors to aiFlows automatically earn a spot on the contributors' wall in the README section of the library's next release. Your efforts are recognized and celebrated as part of the growing community. + +2. How Can I Share My Work With the Community? +------------------------------------------------ + +Sharing your work is highly encouraged! Here are some channels on `Discord `_ to consider: + +- **For Flows On The FlowVerse:** Utilize the 🤲│flow-sharing channel and the 🔨│community-projects forum on Discord. + +- **For Contributions To aiFlows Library:** Engage with the community in the 🔨│community-projects channels. + +- **For Research Contributions:** Share your findings on the 🔬│research channel or explore opportunities in 🔨│community-projects. + +3. Are Contributors Cited for Their Contributions to Research? +------------------------------------------------------------------------- + +Absolutely. Proper recognition is key. Contributors to projects and research are, and should always be, acknowledged and cited for their valuable contributions. This not only honors your work but also builds a culture of respect and collaboration within the community. + + + +Remember, your contributions matter, and sharing your work not only benefits you but also enriches the entire aiFlows community. Happy contributing! 
🚀🌐 diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Quick_Start/quick_start.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Quick_Start/quick_start.md.txt new file mode 100644 index 0000000..5bfce8d --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Quick_Start/quick_start.md.txt @@ -0,0 +1,157 @@ +# Quick Start + +Welcome to the exciting world of aiFlows! 🚀 + +This tutorial will guide you through your first inference runs with different Flows from the FlowVerse for the task of question answering (QA) as an example. In the process, you'll get familiar with the key aspects of the library and experience how, thanks to the modular abstraction and FlowVerse, we can trivially switch between very different pre-implemented question-answering Flows! + +The guide is organized in two sections: +1. [Section 1:](#section-1-running-your-first-qa-flow-using-a-flow-from-the-flowverse) Running your first QA Flow using a Flow from the FlowVerse 🥳 +2. [Section 2:](#section-2-flowverse-playground-notebook) FlowVerse Playground Notebook + + +## Section 1: Running your First QA Flow using a Flow from the FlowVerse + +#### By the Tutorial's End, I Will Have... +* Learned how to pull Flows from the FlowVerse +* Run my first Flow +* Understood how to pass my API information to a Flow + +While, we support many more API providers (including custom ones), for the sake of simplicity, in this tutorial, we will use OpenAI and Azure. + +### Step 1: Pull a Flow From the FlowVerse + +Explore a diverse array of Flows on the FlowVerse here. In this demonstration, we'll illustrate how to use a Flow from the FlowVerse, focusing on the `ChatAtomicFlow` within the `ChatFlowModule`. This versatile Flow utilizes a language model (LLM) via an API to generate textual responses for given textual inputs. It's worth noting the same process described here applies to any available Flow in the FlowVerse (implemented by any member of the community). 
+ +Without further ado, let's dive in! + + + +Concretely, you would use the `sync_dependencies` function to pull the flow definition and its code from the FlowVerse: + +```python +from aiflows import flow_verse +dependencies = [ +{"url": "aiflows/ChatFlowModule", "revision": "main"} +] + +flow_verse.sync_dependencies(dependencies) +``` + +### Step 2: Install External Library Dependencies + + +Each Flow on the FlowVerse should include a `pip_requirements.txt` file for external library dependencies (if it doesn't have any, the file should be empty). You can check its dependencies on the FlowVerse. In general, if there are any, you need to make sure to install them. + +As you can see [here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/pip_requirements.txt), the `ChatFlowModule` doesn't have any external dependencies, so we're all set. + +### Step 3: Run the Flow! +After executing `sync_dependencies`, the code implementation of `ChatFlowModule` has been pulled into the local repository. +We can now just import it: +```python +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow +``` + +Set your API information (copy-paste it): +```python + +#OpenAI backend +api_key = "" # copy paste your api key here +api_information = [ApiInfo(backend_used="openai", api_key=api_key)] + +# Azure backend +# api_key = "" # copy paste your api key here +# api_base = "" # copy paste your api base here +# api_version = "" # copy paste your api version here +# api_information = ApiInfo(backend_used = "azure", +# api_base =api_base, +# api_key = api_key, +# api_version = api_version ) +``` +Each flow from the FlowVerse should have a `demo.yaml` file, which is a demo configuration of how to instantiate the flow.
+ +Load the `demo.yaml` configuration: +```python +from aiflows.utils.general_helpers import read_yaml_file +# get demo configuration +cfg = read_yaml_file("flow_modules/aiflows/ChatFlowModule/demo.yaml") +``` + +An attentive reader might have noticed that the field `flow.backend.api_infos` in `demo.yaml` is set to "???" (see a snippet here below). +```yaml +flow: # Overrides the ChatAtomicFlow config + _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config + + name: "SimpleQA_Flow" + description: "A flow that answers questions." + + # ~~~ Input interface specification ~~~ + input_interface_non_initialized: + - "question" + + # ~~~ backend model parameters ~~ + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? +``` + +The following overwrites the field with your personal API information: +```python +# put the API information in the config +cfg["flow"]["backend"]["api_infos"] = api_information +``` + +Instantiate your Flow: +```python +# ~~~ Instantiate the Flow ~~~ +flow = ChatAtomicFlow.instantiate_from_default_config(**cfg["flow"]) +flow_with_interfaces = { + "flow": flow, + "input_interface": None, + "output_interface": None, +} +``` +Note that `input_interface` and `output_interface` are here to control the data that comes in and out of the flow. In this case, we don't need specific data manipulation, so we will leave to `None`. + +Load some data and run your flow with the `FlowLauncher`: +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "question": "What is the capital of France?"} + +# ~~~ Run the Flow ~~~ +_, outputs = FlowLauncher.launch( + flow_with_interfaces= flow_with_interfaces ,data=data + ) + # ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` +Congratulations! You've successfully run your first question-answering Flow! 
+___ +You can find this example in [runChatAtomicFlow.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/quick_start/runChatAtomicFlow.py) + +To run it, use the following commands in your terminal (make sure to copy-paste your keys first): +```bash +cd examples/quick_start/ +python runChatAtomicFlow.py +``` + +Upon execution, the result should appear as follows: +```bash +[{'api_output': 'The capital of France is Paris.'}] +``` + +## Section 2: FlowVerse Playground Notebook + +Want to quickly run some Flows from FlowVerse? Check out our Jupyter notebook [flow_verse_playground.ipynb](https://github.com/epfl-dlab/aiflows/tree/main/examples/quick_start/flow_verse_playground.ipynb) where you can quickly switch between the following flows from the FlowVerse: + +* [ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) + +* [ReAct](https://huggingface.co/aiflows/ControllerExecutorFlowModule) + +* [ChatInteractiveFlowModule](https://huggingface.co/aiflows/ChatInteractiveFlowModule) + +* [ChatWithDemonstrationsFlowModule](https://huggingface.co/aiflows/ChatWithDemonstrationsFlowModule) + +* [AutoGPTFlowModule](https://huggingface.co/aiflows/AutoGPTFlowModule) + +* [VisionFlowModule](https://huggingface.co/aiflows/VisionFlowModule) diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/atomic_flow.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/atomic_flow.md.txt new file mode 100644 index 0000000..18a9bb2 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/atomic_flow.md.txt @@ -0,0 +1,117 @@ +# Atomic Flow Tutorial + +This guide presents the concept of an AtomicFlow and is organized into two sections: +1. [Section 1:](#section-1-defining-atomic-flows) Defining Atomic Flows +2. [Section 2:](#section-2-writing-your-first-atomic-flow) Writing Your First Atomic Flow + +### By the Tutorial's End, I Will Have...
+ +* Gained insight into the relationship among a Flow, an input interface, and an output interface +* Acquired hands-on experience in creating an `AtomicFlow` with the example of `ReverseNumberAtomic` +* Learned how to run a flow with a `FlowLauncher` + +## Section 1: Defining Atomic Flows + +The `AtomicFlow` class is a subclass of `Flow` and corresponds to an Input/Output interface around a tool (note that LLMs are also tools in the Flows framework!). + +In the paper it's defined as such: + +> +> +> An `Atomic Flow` is effectively a minimal wrapper around +> a tool and achieves two things: +> 1. It fully specifies the tool (e.g., the most basic Atomic Flow around +> GPT-4 would specify the prompts and the generation parameters) +> 2. It abstracts the complexity of the internal computation by exposing only a standard message-based interface for exchanging information with other Flows. +> +> + +Examples of Atomic Flows include: +* A wrapper around an LLM ([ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule)) +* A search engine API ([LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule)) +* An interface with a human ([HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule) +) + +## Section 2: Writing Your First Atomic Flow + +As a starting example, let's create an Atomic Flow that takes a number and returns its reverse. (e.g., if the input is 1234, it should return 4321) + +The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in [reverseNumberAtomic.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverseNumberAtomic.yaml)): + +```yaml +name: "ReverseNumber" +description: "A flow that takes in a number and reverses it." 
+ +input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["number"] + +output_interface: # Connector between the Flow's output and the caller + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + output_number: "reversed_number" # Rename the output_number to reversed_number +``` + +Breaking it down: +- The `name` and `description` parameters are self-explanatory. When defining a Flow you must always define these parameters + +- `input_interface` and `output_interface` define the transformation applied to the input and output data before and after calling the flow. In this case, the `input_interface` ensures the key `number` is in the input data dictionary and passes it to the flow. The `output_interface` renames the key `output_number` to `reversed_number` in the output data dictionary. + +Now let's define the Flow. The class would be implemented as follows (you can also check out the py file [reverse_number_atomic.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverse_number_atomic.py)): +```python +class ReverseNumberAtomicFlow(AtomicFlow): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + # Customize the logic within this function as needed for your specific flow requirements. + def run(self,input_data: Dict[str, Any]) -> Dict[str, Any]: + input_number = input_data["number"] + output_number = int(str(input_number)[::-1]) + response = {"output_number": output_number} + return response +``` +and instantiate the Flow by executing: +```python +overrides_config = read_yaml_file("reverseNumberAtomic.yaml") + +# ~~~ Instantiate the flow ~~~ +flow = ReverseNumberAtomicFlow.instantiate_from_default_config(overrides=overrides_config) +``` +Note that you can also pass a Python dictionary as the `overrides` parameter and not rely on YAML files. + +With all the preparations in place, we can now proceed to invoke our flow and execute it using the `FlowLauncher`. 
+ +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "number": 1234} # This can be a list of samples + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" +_, outputs = FlowLauncher.launch( + flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/) and can be executed as follows: + +```bash +cd examples/minimal\ reverse\ number/ +python reverse_number_atomic.py +``` + +Upon running, the answer you should expect is: +```bash +[{'output_number': 4321}] +``` + + +A few other notable examples of an atomic flow include the [HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule) and the [FixedReplyFlowModule](https://huggingface.co/aiflows/FixedReplyFlowModule) Flow. +___ + + +**Next Tutorial:** [Composite Flow Tutorial](./composite_flow.md) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/autogpt_tutorial.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/autogpt_tutorial.md.txt new file mode 100644 index 0000000..57937e9 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/autogpt_tutorial.md.txt @@ -0,0 +1,265 @@ +# AutoGPT Tutorial +**Prequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [ReAct Tutorial](./reAct.md), [React With Human Feedback Tutorial](./reActwHumanFeedback.md) + +This guide introduces an implementation of the AutoGPT flow. It's organized in two sections: + +1. [Section 1:](#section-1-whats-the-autogpt-flow) What's The AutoGPT flow ? +2. 
[Section 2:](#section-2-running-the-autogpt-flow) Running the AutoGPT Flow + +### By the Tutorial's End, I Will Have... + +* Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications +* Gained proficiency in executing the AutoGPTFlow +* Enhanced comprehension of intricate flow structures + +## Section 1: What's The AutoGPT flow ? + +In the previous tutorial [React With Human Feedback Tutorial](./reActwHumanFeedback.md), we introduced the `ReActWithHumanFeedback` Flow. Towards the end, while the flow demonstrated effective functionality, we observed a notable challenge, especially in prolonged conversations. The principal issue emerged when attempting to transmit the entire message history to the language model (LLM), eventually surpassing the permissible maximum token limit. As a temporary solution, we opted to send only the first two and the last messages as context to the LLM. However, this approach proves suboptimal if your objective is to enable the model to maintain a more comprehensive long-term memory. Consequently, in this tutorial, we will demonstrate how to create a basic implementation of the `AutoGPT` flow, providing a solution to tackles this issue. + +The `AutoGPT` flow is a circular flow that organizes the problem-solving process into four distinct flows: + +1. `ControllerFlow`: Given an a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the `ExecutorFlow`, the next action it should execute to get closer accomplishing its goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow` + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. 
The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `ReActWithHumanFeedbackFlow` if the user expresses such a preference. + +4. `MemoryFlow`: This flow is used to read and write and read memories stored of passed conversations in a database. These memories can be passed to the `ControllerFlow` enabling it to have a long term memory without having to transmit the entire message history to the language model (LLM). It's implemented with the `VectorStoreFlow` + +Here's a broad overview of the `AutoGPTFlow`: + +``` +| -------> Memory Flow -------> Controller Flow ------->| +^ | +| | +| v +| <----- HumanFeedback Flow <------- Executor Flow <----| +``` + +## Section 2 Running the AutoGPT Flow + +In this section, we'll guide you through running the ReActWithHumanFeedbackFlow. + +For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/). + +Now, let's delve into the details without further delay! + +Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. 
Specifically, we'll fetch the `AutoGPTFlowModule`, which includes `ControllerFlow`, `ExecutorFlow`, and the `WikiSearchAtomicFlow`. Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow. + +```python +from aiflows import flow_verse +# ~~~ Load Flow dependecies from FlowVerse ~~~ +dependencies = [ + {"url": "aiflows/AutoGPTFlowModule", "revision": "main"}, + {"url": "aiflows/LCToolFlowModule", "revision": "main"} +] + +flow_verse.sync_dependencies(dependencies) +``` + +If you've successfully completed the [ReAct Tutorial](./reAct.md), you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a `pip_requirements.txt` file detailing external library dependencies. To further explore this, examine the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt), and the [pip_requirements.txt for the AutoGPTFlowModule](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/pip_requirements.txt). You'll observe the necessity to install the following external libraries if they haven't been installed already: + +```bash +pip install duckduckgo-search==3.9.6 +pip install wikipedia==1.4.0 +pip install langchain==0.0.336 +pip install chromadb==0.3.29 +pip install faiss-cpu==1.7.4 +``` + +Now that we've fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our Flow. + +The configuration for our flow is available in [AutoGPT.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/AutoGPT.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. 
`AutoGPTFlow`'s default config can be found [here](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/AutoGPTFlow.yaml), the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml) and memory's flow default config `VectorStoreFlow` can be found [here](https://huggingface.co/aiflows/VectorStoreFlowModule/blob/main/VectorStoreFlow.yaml) + +Our focus will be on explaining the modified parameters in the configuration, with reference to the [ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md) Tutorial for unchanged parameters. +Now let's look at the flow's configuration: +```yaml +flow: + _target_: flow_modules.aiflows.AutoGPTFlowModule.AutoGPTFlow.instantiate_from_default_config + max_rounds: 30 +``` +* `_target_`: We're instantiating `AutoGPTFlow` with its default configuration and introducing some overrides, as specified below. +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow`, the `ExecutorFlow`, the `HumanFeedbackFlow` and the `MemoryFlow`: +```yaml + ### Subflows specification + subflows_config: + #ControllerFlow Configuration + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." + input_args: ["answer"] + backend: + api_infos: ??? 
+ human_message_prompt_template: + template: |2- + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + input_interface_initialized: + - "observation" + - "human_feedback" + + previous_messages: + last_k: 1 + first_k: 2 +``` +The `ControllerFlow` is identical to `ReActWithHumanFeedback`. +```yaml + #ExecutorFlow Configuration + Executor: + _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config + subflows_config: + wiki_search: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config + ddg_search: + _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` +The `ExecutorFlow` is identical to `ReActWithHumanFeedback` and `ReAct`. +```yaml + #MemoryFlow Configuration + Memory: + backend: + model_name: none + api_infos: ??? +``` +The `MemoryFlow`, primarily instantiated from [AutoGPT's defaut configuration](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/AutoGPTFlow.yaml#L87).Additionally, please refer to the `MemoryFlow`'s [FlowCard](https://huggingface.co/aiflows/VectorStoreFlowModule) for more details. + +With our configuration file in place, we can now proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (commented): + +```python +# ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") ) + +``` + +Next, load the YAML configuration, insert your API information, and define the `flow_with_interfaces` dictionary as shown below: + +```python +cfg = read_yaml_file(cfg_path) + +# put the API information in the config +cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information +cfg["flow"]["subflows_config"]["Memory"]["backend"]["api_infos"] = api_information +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Lastly, execute the flow using the FlowLauncher. 
+```python +data = { + "id": 0, + "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?", +} +# At first, we retrieve information about Michael Jordan the basketball player +# If we provide feedback, only in the first round, that we are not interested in the basketball player, +# but the statistician, and skip the feedback in the next rounds, we get the correct answer + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, + data=data, + path_to_output_file=path_to_output_file, +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/) and can be executed as follows: + +```bash +cd examples/AutoGPT +python run.py +``` + +Upon execution, you will be prompted for feedback on the Executor's answer. The interaction will resemble the following: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? + +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Jordan'} + +== Result +{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. 
He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. 
He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'} + +[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` + +You can respond with: + +``` +No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia? +``` + +Subsequently, ReAct will provide a response similar to this: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? 
+ +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Irwin Jordan'} + +== Result +{'wiki_content': 'Michael Irwin Jordan (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '} +[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` +Your subsequent response could be: + +``` +There you go! I think you have it! +``` +Eventually, the flow should terminate and return something similar to: + +``` +[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}] +``` + +Congratulations you've succesfully run `AutoGPTFlow` ! 
diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/composite_flow.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/composite_flow.md.txt new file mode 100644 index 0000000..c143bf9 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/composite_flow.md.txt @@ -0,0 +1,178 @@ +# Composite Flow Tutorial +**Prerequisites:** [Atomic Flow Tutorial](./atomic_flow.md) + + +This guide introduces the concept of a composite flow by illustrating the creation of a sequential flow, a specific type of composite flow. The content is structured into two main sections: +1. [Section 1:](#section-1-defining-composite-flows-and-sequential-flows) Defining Composite Flows and Sequential Flows +2. [Section 2:](#section-2-writing-your-first-sequential-flow) Writing Your First Sequential Flow + +### By the Tutorial's End, I Will Have... + +* Gained insights into the concept of a Composite Flow +* Acquired the skills to create a `SequentialFlow` through a toy example +* Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure + + +## Section 1: Defining Composite Flows and Sequential Flows + +A `SequentialFlow` entails the sequential execution of a series of flows. It's a subclass of `CompositeFlow`. + +In the paper, a Composite Flow is described as follows: + +> +> +> Composite Flows accomplish more challenging, higher-level goals by leveraging and coordinating +> other Flows. Crucially, thanks to their local state and standardized interface, Composite Flows +> can readily invoke Atomic Flows or other Composite Flows as part of compositional, structured +> interactions of arbitrary complexity. Enabling research on effective patterns of interaction is one of +> the main goals of our work. +> +> + +Therefore, a `SequentialFlow` is a specialized form of `CompositeFlow` that runs Flows sequentially. 
+ +Other types of Composite Flows include: +* `CircularFlow`: A series of flows excuted in a circular fashion (e.g [ReAct](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/)) +* `BranchingFlow`: A series of flows organized in a parallel fashion. The branch (Flow) executed depends on the input of the branching flow (e.g. [BranchingFlow](https://github.com/epfl-dlab/aiflows/tree/main/aiflows/base_flows/branching.py)) + +## Section 2: Writing Your First Sequential Flow + +As an introductory example, let's leverage the atomic flow created in the previous tutorial ([Atomic Flow Tutorial](./atomic_flow.md)) to construct a `SequentialFlow`. This `SequentialFlow` will take a number, reverse it, and then reverse it back again. + +Given the input number 1234, the process should unfold as follows: + +```rust +Input | Sequential Flow | Output +------------|--------------------------------------|-------------- + | | +1234 -------|---> Flow1 ---> 4321 ---> Flow2 ------|-----> 1234 + | | + | | +``` + +The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in [reverseNumberSequential.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverseNumberSequential.yaml)): +```yaml +name: "ReverseNumberTwice" +description: "A sequential flow that reverses a number twice." + +# input and output interfaces of SequentialFlow +input_interface: + - "number" + +output_interface: + - "output_number" + +#configuration of subflows +subflows_config: + first_reverse_flow: + _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config + name: "ReverseNumberFirst" + description: "A flow that takes in a number and reverses it." + second_reverse_flow: + _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config + name: "ReverseNumberSecond" + description: "A flow that takes in a number and reverses it." 
+ +# Define order of execution of subflows and input & output interfaces for proper execution +topology: + #fist flow to execute + - goal: reverse the input number + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["number"] + flow: first_reverse_flow + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + output_number: first_reverse_output + keys_to_select: ["first_reverse_output"] + reset: false + #second flow to execute + - goal: reverse the output of the first reverse + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + first_reverse_output: number + keys_to_select: ["number"] + flow: second_reverse_flow + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["output_number"] + reset: false + +``` + +Breaking it down: +* The `name` and `description` parameters are self-explanatory. When defining a Flow you must always define these parameters + +* `input_interface` specifies the expected keys in the input data dictionary passed to the `SequentialFlow` + +* `output_interface` outlines the expected keys in the output data dictionary produced by the `SequentialFlow` + +* In the `subflows_config`, the specification of flows constituating the `SequentialFlow` are detailed. Each subflow is articulated as a key-item pair within a dictionary. The key denotes the name assigned to the subflow, while the corresponding item is a dictionary encapsulating the configuration of the subflow. In this instance, subflows are outlined with their default configuration, incorporating overrides for the `name` and `description` of each flow. + +* `topology` defines the order in which flows are executed within our `SequentialFlow`. +It also specifies the input and output interfaces for each flow. The fields in topology include: + * `goal`: A description of the objective of the flow at the given execution step. 
+ * `flow`: The name of the flow to be invoked, matching the name defined in `subflows_config`. + * `input_interface`: Specifies the transformation to the input data + dictionary before passing it to the current subflow. + * `output_interface`: Specifies the transformation to the output data dictionary + before passing it to the next subflow. + * `reset`: Determines whether to reset the state and history of the flow after calling it (i.e., deletes all message history and key-value pairs (cache) saved in the flow state). + + +Note the importance of the transformations defined in the `input_interface` and `output_interface` +within the `topology`. These transformations play a crucial role in establishing a connection +between the two flows. Specifically, the `input_interface` of the `second_reverse_flow` includes a transformation +that renames the dictionary key `first_reverse_output`, which is passed by the `first_reverse_flow`, to `number`. +This ensures proper key naming and enables the seamless execution of the subsequent flow. + +Now let's instantiate the `SequentialFlow` (you can also check out the py file +[reverse_number_sequential.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverse_number_sequential.py)): + +```python +cfg_path = os.path.join(root_dir, "reverseNumberSequential.yaml") +cfg = read_yaml_file(cfg_path) + +# ~~~ Instantiate the flow ~~~ +flow = SequentialFlow.instantiate_from_default_config(**cfg) +``` + +There is no need to define any new class +since the `SequentialFlow` is a [base_flow](https://github.com/epfl-dlab/aiflows/tree/main/aiflows/base_flows/sequential.py) (meaning it's already defined in the aiFlows library) and we've already +defined the `ReverseNumberAtomicFlow` in the previous tutorial ([Atomic Flow Tutorial](./atomic_flow.md)) + +With all the preparations in place, we can now proceed to invoke our flow and execute it using the `FlowLauncher`. 
+ +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "number": 1234} # This can be a list of samples + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" +_, outputs = FlowLauncher.launch( + flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/) and can be executed as follows: + +```bash +cd examples/minimal\ reverse\ number/ +python reverse_number_sequential.py +``` + +Upon running, the answer you should expect is: +``` +[{'output_number': 1234}] +``` +___ + + +**Next Tutorial:** [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md.txt new file mode 100644 index 0000000..68a950c --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md.txt @@ -0,0 +1,229 @@ + +# Introducing the FlowVerse with a Simple Q&A Flow +**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Atomic Flow Tutorial](./atomic_flow.md) + +This guide introduces the FlowVerse via an example: [minimalQA](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/). The guide is organized in two sections: +1. [Section 1:](#section-1-whats-the-flowverse) What's the FlowVerse? +2. [Section 2:](#section-2-crafting-a-simple-qa-flow-with-the-chatflowmodule) Crafting a Simple Q&A Flow with the ChatFlowModule + +### By the Tutorial's End, I Will Have... 
+
+
* Gained an understanding of the FlowVerse and its significance
* Acquired the skills to retrieve flows from the FlowVerse
* Successfully developed my initial flow by incorporating a FlowVerse flow
* Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API
* Familiarized myself with the fundamental parameters of the `ChatAtomicFlow`

## Section 1: What's the FlowVerse ?
The FlowVerse is the hub of flows created and shared by our amazing community for everyone to use! These flows are usually shared on Hugging Face with the intention of being reused by others. Explore our Flows on the FlowVerse [here](https://huggingface.co/aiflows)!

## Section 2: Crafting a Simple Q&A Flow with the ChatFlowModule

In this section, we'll guide you through the creation of a simple Q&A flow — a single user-assistant interaction with a LLM. We'll achieve this by leveraging the `ChatAtomicFlow` from the [ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) in the FlowVerse. The `ChatAtomicFlow` seamlessly interfaces with an LLM through an API, generating textual responses for textual input. Powered by the LiteLLM library in the backend, `ChatAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers).

For an in-depth understanding of `ChatAtomicFlow`, refer to its [FlowCard (README)](https://huggingface.co/aiflows/ChatFlowModule/blob/main/README.md).
Note that all the code referenced from this point onwards can be found [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/)

Let's dive in without further delay! 
+
+
First thing to do is to fetch the `ChatFlowModule` from the FlowVerse (see [run_qa_flow.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow.py) to see all the code):
```python
from aiflows import flow_verse
# ~~~ Load Flow dependencies from FlowVerse ~~~
dependencies = [
    {"url": "aiflows/ChatFlowModule", "revision": "297c90d08087d9ff3139521f11d1a48d7dc63ed4"},
]
flow_verse.sync_dependencies(dependencies)
```
Let's break this down:
* `dependencies` is a list of dictionaries (in this case, there's only one) indicating which FlowModules we want to pull from the FlowVerse. The dictionary contains two key-value pairs:
    * `url`: Specifies the URL where the flow can be found on Hugging Face. Here, the URL is `aiflows/ChatFlowModule`, where `aiflows` is the name of our organization on Hugging Face (or the username of a user hosting their flow on Hugging Face), and `ChatFlowModule` is the name of the FlowModule containing the `ChatAtomicFlow` on the FlowVerse. Note that the `url` is literally the address of the `ChatFlowModule` on Hugging Face (excluding the https://huggingface.co/). So if you type https://huggingface.co/aiflows/ChatFlowModule in your browser, you will find the Flow.
    * `revision`: Represents the revision id (i.e., the full commit hash) of the commit we want to fetch. Note that if you set `revision` to `main`, it will fetch the latest commit on the main branch.

Now that we've fetched the `ChatAtomicFlowModule` from the FlowVerse, we can start creating our Flow.

The configuration for our flow is available in [simpleQA.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/simpleQA.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. 
The default configuration can be found [here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) + +Let's start with the input and output interface: +```yaml +input_interface: # Connector between the "input data" and the Flow + _target_: aiflows.interfaces.KeyInterface + additional_transformations: + - _target_: aiflows.data_transformations.KeyMatchInput # Pass the input parameters specified by the flow + +output_interface: # Connector between the Flow's output and the caller + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + api_output: answer # Rename the api_output to answer +``` +* `input_interface` specifies the expected keys in the input data dictionary passed to our flow. +* `output_interface` outlines the expected keys in the output data dictionary produced by our flow. + +Now let's look at the flow's configuration: +```yaml +flow: # Overrides the ChatAtomicFlow config + _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config + + name: "SimpleQA_Flow" + description: "A flow that answers questions." +``` + +* The `_target_` parameter specifies the instantiation method for our flow. In this instance, we're using it to instantiate the `ChatAtomicFlow` from [its default configuration file](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) +* `name` and `description`: self-explanatory parameters + + +```yaml + # ~~~ Input interface specification ~~~ + input_interface_non_initialized: + - "question" +``` +* The `input_interface_non_initialized` parameter in our configuration specifies the keys expected in the input data dictionary when the `ChatAtomicFlow` is called for the first time (i.e., when the system prompt is constructed). Essentially, it serves a role similar to the regular `input_interface`. The distinction becomes apparent when you require different inputs for the initial query compared to subsequent queries. 
For instance, in ReAct, the first time you query the LLM, the input is provided by a human, such as a question. In subsequent queries, the input comes from the execution of a tool (e.g. a query to wikipedia). In ReAct's case, these two scenarios are distinguished by `ChatAtomicFlow`'s `input_interface_non_initialized` and `input_interface_initialized` parameters. For this tutorial, as we're creating a simple Q&A flow performing a single user-assistant interaction with an LLM, we never use `input_interface_initialized` (which is why it's not defined in the configuration). + +```yaml + # ~~~ backend model parameters ~~ + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? + model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" + + # ~~~ generation_parameters ~~ + n: 1 + max_tokens: 3000 + temperature: 0.3 + + top_p: 0.2 + frequency_penalty: 0 + presence_penalty: 0 +``` +* `backend` is a dictionary containing parameters specific to the LLM. These parameters include: + * `api_infos` Your API information (which will be passed later for privacy reasons). + * `model_name` A dictionary with key-item pairs, where keys correspond to the `backend_used` attribute of the `ApiInfo` class for the chosen backend, and values represent the desired model for that backend. Model selection depends on the provided `api_infos`. Additional models can be added for different backends, following LiteLLM's naming conventions (refer to LiteLLM's supported providers and model names [here](https://docs.litellm.ai/docs/providers)). For instance, with an Anthropic API key and a desire to use "claude-2," one would check Anthropic's model details [here](https://docs.litellm.ai/docs/providers/anthropic#model-details). As "claude-2" is named the same in LiteLLM, the `model_name` dictionary would be updated as follows: + ```yaml + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? 
+ model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" + anthropic: "claude-2" + ``` + * `n`,`max_tokens`,`top_p`, `frequency_penalty`, `presence_penalty` are generation parameters for LiteLLM's completion function (refer to all possible generation parameters [here](https://docs.litellm.ai/docs/completion/input#input-params-1)). + + +```yaml + # ~~~ Prompt specification ~~~ + system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are a helpful chatbot that truthfully answers questions. + input_variables: [] + partial_variables: {} + + init_human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + Answer the following question: {{question}} + input_variables: ["question"] + partial_variables: {} + +``` +* `system_message_prompt_template`: This is the system prompt template passed to the LLM. +* `init_human_message_prompt_template`: This is the user prompt template passed to the LLM the first time the flow is called. It includes the following parameters: + * `template` The prompt template in Jinja format. + * `input_variables` The input variables of the prompt. For instance, in our case, the prompt `template` + is "Answer the following question: {{question}}," and our `input_variables` is "question." Before querying the LLM, the prompt `template` is rendered by placing the input variable "question" in the placeholder "{{question}}" of the prompt `template`. It's worth noting that `input_interface_non_initialized == input_variables`. This alignment is intentional, as they are passed as input_variables to the `init_human_message_prompt_template` to render the `template` + + +Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): +```python + # ~~~ Set the API information ~~~ +# OpenAI backend + +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] + +# # Azure backend +# api_information = [ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") )] + +# # Anthropic backend +#api_information = [ApiInfo(backend_used= "anthropic",api_key = os.getenv("ANTHROPIC_API_KEY"))] + +``` +Next, load the YAML configuration, insert your API information, and define the `flow_with_interfaces` dictionary: +```python + +cfg_path = os.path.join(root_dir, "simpleQA.yaml") +cfg = read_yaml_file(cfg_path) +# put api information in config (done like this for privacy reasons) +cfg["flow"]["backend"]["api_infos"] = api_information + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Finally, run the flow with `FlowLauncher`. 
+
+
```python
# ~~~ Get the data ~~~
data = {"id": 0, "question": "Who was the NBA champion in 2023?"} # This can be a list of samples

# ~~~ Run inference ~~~
path_to_output_file = None
# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk

_, outputs = FlowLauncher.launch(
    flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file
)

# ~~~ Print the output ~~~
flow_output_data = outputs[0]
print(flow_output_data)
```

The full example is available [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/) and can be executed as follows:

```bash
cd examples/minimal\ QA/
python run_qa_flow.py
```

Upon running, the answer is similar to the following:
```bash
[{'answer': "I'm sorry, but as an AI language model, I don't have access to real-time information or the ability to predict future events. As of now, I cannot provide you with the answer to who the NBA champion was in 2023. I recommend checking reliable sports news sources or conducting an internet search for the most up-to-date information."}]
```
To learn how to obtain information on the 2023 NBA Champion using Flows, refer to the next tutorial [ReAct](./reAct.md), a Flow that provides `ChatAtomicFlow` with access to tools like search engines!

Additionally, the [minimal QA](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/) folder contains other examples using `ChatAtomicFlow` such as:
* Running a [Flow with Demonstrations](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow_w_demonstrations.py) (encouraging the LLM to finish its answers with "my sire"). 
To run:
  ```bash
  cd examples/minimal\ QA/
  python run_qa_flow_w_demonstrations.py
  ```
* Running the [Simple Q&A flow in a multithreaded fashion](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow_multithreaded.py) in order to answer multiple questions with multiple API keys or providers. To run:
  ```bash
  cd examples/minimal\ QA/
  python run_qa_flow_multithreaded.py
  ```
___


**Next Tutorial:** [ReAct Tutorial](./reAct.md)





diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reAct.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reAct.md.txt
new file mode 100644
index 0000000..2ffe192
--- /dev/null
+++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reAct.md.txt
@@ -0,0 +1,202 @@
+# ReAct Tutorial
+**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [Atomic Flow Tutorial](./atomic_flow.md), [Composite Flow Tutorial](./composite_flow.md)

This guide introduces an implementation of the ReAct flow. The guide is organized in two sections:

1. [Section 1:](#section-1-whats-the-react-flow) What's The ReAct Flow ?
2. [Section 2:](#section-2-running-the-react-flow) Running the ReAct Flow

### By the Tutorial's End, I Will Have...

* Gained an understanding of the ReAct flow and its significance
* Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies
* Successfully developed my first personalized ReAct flow
* Familiarized myself with the essential parameters of the `ControllerExecutorFlow`

## Section 1: What's The ReAct Flow ?

The ReAct flow, as introduced in [ReAct: Synergizing Reasoning and Acting in Language Models](https://arxiv.org/pdf/2210.03629.pdf), represents a Circular flow that organizes the problem-solving process into two distinct flows:

1. 
`ControllerFlow`: With a specified goal and past observations from prior executions, the `ControllerFlow` makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the `ExecutorFlow` and contribute to progressing towards the defined goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow`. + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +These steps are repeated until an answer is obtained. + +## Section 2: Running The ReAct Flow + +In this section, we'll guide you through running the ReAct Flow. + +For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/) + +Now, let's delve into the details without further delay! + +Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. Specifically, we'll fetch the `ControllerExecutorFlowModule`, which includes the `ControllerExecutorFlow` (the composite flow of `ControllerFlow` and `ExecutorFlow`) and the `WikiSearchAtomicFlow`. Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow. 
+
+
```python
from aiflows import flow_verse
# ~~~ Load Flow dependencies from FlowVerse ~~~
dependencies = [
    {"url": "aiflows/LCToolFlowModule", "revision": "main"},
    {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"},
]

flow_verse.sync_dependencies(dependencies)
```

Each flow on the FlowVerse includes a `pip_requirements.txt` file for external library dependencies. Check out the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt) and [pip_requirements.txt for the ControllerExecutorFlowModule](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/pip_requirements.txt). You'll notice the need to install the following external libraries:
```
pip install wikipedia==1.4.0
pip install langchain==0.0.336
pip install duckduckgo-search==3.9.6
```

Now that we've fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our flow.

The configuration for our flow is available in [ReAct.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/ReAct.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The `ControllerExecutorFlow`'s default config can be found [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml) and the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml).
Now let's look at the flow's configuration:
```yaml
flow:
  _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerExecutorFlow.instantiate_from_default_config
  max_rounds: 30
```
* The `_target_` parameter specifies the instantiation method for our flow. 
In this instance, we're using it to instantiate the `ControllerExecutorFlow` from its default configuration file. +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow` and the `ExecutorFlow`: +```yaml + ### Subflows specification + subflows_config: + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." + input_args: ["answer"] + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? + model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" +``` +* `Controller`: The configuration of the controller flow: + * `commands`: A dictionary containing the set of actions the `ControllerFlow` can call. 
Each key of the dictionary is the name of the action it can execute, and its items are another dictionary containing the following parameters:
    * `description`: A description of what the action does (it's important to be clear since these descriptions are passed to the system prompt to explain to the LLM what each action can do)
    * `input_args`: The list of arguments required by a given action
  * `backend`: The backend used by the `ControllerFlow` (see the previous tutorial [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) for a more detailed description of the backend)
```yaml
  Executor:
    _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
    subflows_config:
      wiki_search:
        _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
      ddg_search:
        _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config
        backend:
          _target_: langchain.tools.DuckDuckGoSearchRun

```
* `Executor`: The configuration of the `ExecutorFlow`:
  * `subflows_config`: The configuration of the subflows of the `ExecutorFlow`. Each subflow corresponds to an action defined in the `ControllerFlow` through the `commands` parameter. It is noteworthy that the names of the `command` keys align with the names of the subflows in the Executor's `subflow_config`

Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): + +```python + # ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = [ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") )] +``` + +Next, load the YAML configuration, insert your API information, +and define the `flow_with_interfaces` dictionary: + +```python +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +root_dir = "." +cfg_path = os.path.join(root_dir, "ReAct.yaml") +cfg = read_yaml_file(cfg_path) +# put the API information in the config +cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Finally, run the flow with `FlowLauncher`. 
+```python + # ~~~ Get the data ~~~ +# This can be a list of samples +# data = {"id": 0, "goal": "Answer the following question: What is the population of Canada?"} # Uses wikipedia +data = {"id": 0, "goal": "Answer the following question: Who was the NBA champion in 2023?"} + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The full example is available [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/) and can be executed as follows: + +```bash +cd examples/ReAct +python run.py +``` + +Upon execution, the result appears as follows: +```bash +[{'answer': 'The NBA champion in 2023 was the Denver Nuggets.', 'status': 'finished'}] +``` +Finally we have the correct answer! + +However, let's consider a scenario where you want to instruct ReAct: + +> **Answer the following question: What is the profession and date of birth of Michael Jordan?** + +Where Michael Jordan is the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkley. If you run this with ReAct, the obtained answer might look like this: + +```bash +[{'answer': 'Michael Jordan is a former professional basketball player and an American businessman. He was born on February 17, 1963.', 'status': 'finished'}] +``` +Which is not what we wanted ! This output does not align with our intended question. + +To discover how to retrieve information on Michael Jordan, the Berkeley Professor, using aiFlows, refer to the next tutorial [ReActWithHumanFeedback](./reActwHumanFeedback.md), a flow that incorporates human feedback into the ReAct flow! 
+
+
___


**Next Tutorial:** [ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md)


diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reActwHumanFeedback.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reActwHumanFeedback.md.txt
new file mode 100644
index 0000000..c240c96
--- /dev/null
+++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/reActwHumanFeedback.md.txt
@@ -0,0 +1,373 @@
+# ReAct With Human Feedback Tutorial
+**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [ReAct Tutorial](./reAct.md)

This guide introduces an implementation of the ReAct flow. It's organized in two sections:

1. [Section 1:](#section-1-whats-the-react-with-human-feedback-flow) What's The ReAct With Human Feedback Flow ?
2. [Section 2:](#section-2-running-the-react-with-human-feedback-flow) Running the ReAct With Human Feedback Flow

### By the Tutorial's End, I Will Have...

* Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences
* Learned how to integrate a human feedback flow into ReAct
* Incorporated customized functions into the input and output interfaces.
* Grasped the limitations of ReAct, particularly its lack of long-term memory
* Deepened my understanding of the key parameters in the `ControllerExecutorFlow` configuration

## Section 1: What's The ReAct With Human Feedback Flow ?

In the previous tutorial ([ReAct Tutorial](./reAct.md)), we introduced the ReAct flow. We noticed towards the end that, even though it works well, it can fail in some situations. 
For example, consider you ask the following: +> **Answer the following question: What is the profession and date of birth of Michael Jordan?** + + +In scenarios where the mentioned "Michael Jordan" refers to the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkeley, ReAct may misinterpret it as the basketball player Michael Jordan and provide information about the latter. To address this, we can introduce an additional flow in our circular flow, allowing users to provide feedback on intermediate answers. This tutorial will guide you through the creation of the `ReActWithHumanFeedback` flow to handle such situations. + +The `ReActWithHumanFeedback` flow is a circular flow that organizes the problem-solving process into three distinct flows: + +1. `ControllerFlow`: With a specified goal and past observations from prior executions, the `ControllerFlow` makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the `ExecutorFlow` and contribute to progressing towards the defined goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow`. + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. 
The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `ReActWithHumanFeedbackFlow` if the user expresses such a preference.

## Section 2: Running the ReAct With Human Feedback Flow

In this section, we'll guide you through running the `ReActWithHumanFeedbackFlow`.

For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/).

Now, let's delve into the details without further delay!

Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. Specifically, we'll fetch the `ControllerExecutorFlowModule`, which includes the `ControllerExecutorFlow` (the composite flow of `ControllerFlow` and `ExecutorFlow`) and the `WikiSearchAtomicFlow`. Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow, and the `HumanStandardInputFlowModule`, a flow capable of gathering human feedback.

```python
from aiflows import flow_verse
# ~~~ Load Flow dependencies from FlowVerse ~~~
dependencies = [
    {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"},
    {"url": "aiflows/HumanStandardInputFlowModule", "revision": "main"},
    {"url": "aiflows/LCToolFlowModule", "revision": "main"},
]

flow_verse.sync_dependencies(dependencies)
```

If you've successfully completed the preceding tutorial, [ReAct Tutorial](./reAct.md), you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a `pip_requirements.txt` file detailing external library dependencies. 
To further explore this, examine the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt), the [pip_requirements.txt for the ControllerExecutorFlowModule](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/pip_requirements.txt), and the [pip_requirements.txt for the HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule/blob/main/pip_requirements.txt). You'll observe the necessity to install the following external libraries if they haven't been installed already:

```bash
pip install wikipedia==1.4.0
pip install langchain==0.0.336
pip install duckduckgo-search==3.9.6
```


Next, in order to empower the `HumanStandardInputFlow` to terminate the `ReActWithHumanFeedback` flow, it is essential to implement a function in the `ControllerExecutorFlow` class for this specific purpose. Consequently, a new class, `ReActWithHumanFeedback`, is introduced as follows (you can find it in [ReActWithHumanFeedback.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/ReActWithHumanFeedback.py)):


```python
from typing import Dict, Any

from aiflows.base_flows import CircularFlow
from flow_modules.aiflows.ControllerExecutorFlowModule import ControllerExecutorFlow

class ReActWithHumanFeedback(ControllerExecutorFlow):
    @CircularFlow.output_msg_payload_processor
    def detect_finish_in_human_input(self, output_payload: Dict[str, Any], src_flow) -> Dict[str, Any]:
        human_feedback = output_payload["human_input"]
        if human_feedback.strip().lower() == "q":
            return {
                "EARLY_EXIT": True,
                "answer": "The user has chosen to exit before a final answer was generated.",
                "status": "unfinished",
            }

        return {"human_feedback": human_feedback}
```
Note that, we've simply added one function to the class which initiates the procedure to terminate the flow should the user enter "q" when prompted for feedback. 
+ +The configuration for our flow is available in [ReActWithHumanFeedback.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/ReActWithHumanFeedback.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The `ControllerExecutorFlow`'s default config can be found [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml) and the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml). + +Our focus will be on explaining the modified parameters in the configuration, with reference to the previous tutorial for unchanged parameters. +Now let's look at the flow's configuration: +```yaml +max_rounds: 30 +``` +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow`, the `ExecutorFlow` and the `HumanFeedbackFlow`: +```yaml +### Subflows specification +subflows_config: + #ControllerFlow + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + backend: + api_infos: ??? + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." 
+ input_args: ["answer"] + + human_message_prompt_template: + template: |2- + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + input_interface_initialized: + - "observation" + - "human_feedback" + + previous_messages: + first_k: 2 # keep the system prompt and the original goal + last_k: 1 # keep only the last message +``` +Note that the `ControllerFlow` configuration remains nearly identical to that in the previous tutorial, [ReAct Tutorial](./reAct.md). The only differences are: +* The inclusion of an extra argument, "human_feedback," in both the `input_interface_initialized` parameter and the `input_variables` parameter of the `human_message_prompt_template`. This is to incorporate the human's feedback in the message fed to the `ControllerFlow` +* Implementation of a mechanism to limit the number of `previous_messages` from the flow's chat history that is input to the Language Model (LLM). This limitation is crucial to prevent the Language Model (LLM) from exceeding the maximum token limit. Two parameters are overridden for this purpose: + * `first_k`: Adds the first_k earliest messages of the flow's chat history to the input of the LLM. + * `last_k`: Adds the last_k latest messages of the flow's chat history to the input of the LLM. + + +```yaml + #ExecutorFlow + Executor: + _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config + subflows_config: + wiki_search: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config + ddg_search: + _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` +The `ExecutorFlow` is identical to ReAct. 
+```yaml + HumanFeedback: + _target_: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config + request_multi_line_input_flag: False + query_message_prompt_template: + template: |2- + Please provide feedback on the last step. + + Relevant information: + == Goal == + {{goal}} + + == Last Command == + {{command}} + + == Args + {{command_args}} + + == Result + {{observation}} + input_variables: + - "goal" + - "command" + - "command_args" + - "observation" + input_interface: + - "goal" + - "command" + - "command_args" + - "observation" +``` +`HumanFeedback`: + * `request_multi_line_input_flag`: This boolean parameter determines whether the user/human is prompted to enter a multi-line input (True) or a single-line input (False). + * `query_message_prompt_template`: This parameter involves a prompt template used to generate the message presented to the human. It includes: + * `template`: The prompt template in Jinja format. + * `input_variables` The input variables of the prompt. Note that these input variables have the same names as the placeholders "{{}}" in the `template`. Before querying the human, the template is rendered by placing the `input_variables` in the placeholders of the `template`. + * `input_interface`: Describes the expected input interface for the flow. It's noteworthy that the `input_interface` is identical to the `input_variables` of the `query_message_prompt_template`. This alignment is intentional, as they are passed as `input_variables` to the `query_message_prompt_template` to render the message presented to the user. + + +```yaml +topology: # The first two are the same as in the ControllerExecutorFlow + - goal: "Select the next action and prepare the input for the executor." 
+ input_interface: + _target_: aiflows.interfaces.KeyInterface + additional_transformations: + - _target_: aiflows.data_transformations.KeyMatchInput + flow: Controller + output_interface: + _target_: ControllerExecutorFlow.detect_finish_or_continue + reset: false + + - goal: "Execute the action specified by the Controller." + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + command: branch + command_args: branch_input_data + keys_to_select: ["branch", "branch_input_data"] + flow: Executor + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + branch_output_data: observation + keys_to_select: ["observation"] + reset: false + + - goal: "Ask the user for feedback." + input_interface: + _target_: aiflows.interfaces.KeyInterface + flow: HumanFeedback + output_interface: + _target_: ReActWithHumanFeedback.detect_finish_in_human_input + reset: false + +``` +The default topology of the `ControllerExecutorFlow` is overriden here: +* For more details on topology, refer to the tutorial [Composite Flow](./composite_flow.md). +* The topology of the `ControllerExecutorFlow`'s default config is available [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml#L36). +* Upon comparison with the default config's topology, one would observe that the sole alteration is the incorporation of the `HumanFeedbackFlow` to the circular flow. +* Note the significance of including the `detect_finish_in_human_input` function from the `ReActWithHumanFeedback` class in the output interface. This function, as defined earlier, plays a crucial role in initiating the process of terminating the flow if the human/user provides "q" as feedback. + +Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): + +```python +# ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") ) +``` + +Next, load the YAML configuration, insert your API information, +and define the flow_with_interfaces dictionary: + +```python +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk +root_dir = "." +cfg_path = os.path.join(root_dir, "ReActWithHumanFeedback.yaml") +cfg = read_yaml_file(cfg_path) +# put the API information in the config +cfg["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information +flow = ReActWithHumanFeedback.instantiate_from_default_config(**cfg) + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": flow, + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` + +Finally, run the flow with FlowLauncher. 
+ +```python + data = { + "id": 0, + "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?", +} +# At first, we retrieve information about Michael Jordan the basketball player +# If we provide feedback, only in the first round, that we are not interested in the basketball player, +# but the statistician, and skip the feedback in the next rounds, we get the correct answer + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, + data=data, + path_to_output_file=path_to_output_file, +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/) and can be executed as follows: + +```bash +cd examples/ReActWithHumanFeedback +python run.py +``` + +Upon execution, you will be prompted for feedback on the Executor's answer. The interaction will resemble the following: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? + +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Jordan'} + +== Result +{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. 
He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. 
He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'} + +[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` + +You can respond with: + +``` +No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia? +``` + +Subsequently, ReAct will provide a response similar to this: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? 
+ +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Irwin Jordan'} + +== Result +{'wiki_content': 'Michael Irwin Jordan (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '} +[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` +Your subsequent response could be: + +``` +There you go! I think you have it! +``` +Eventually, the flow should terminate and return something similar to: + +``` +[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}] +``` + +Finally, it provides the correct answer! + + +Nevertheless, persisting with the use of `ReActWithHumanFeedback` may reveal an inherent challenge, particularly in prolonged conversations. 
The primary issue arises when attempting to pass the entire message history to the language model (LLM), eventually surpassing the maximum token limit allowable. As a workaround, we currently send only the first two and the last messages as context to the LLM. However, this approach is suboptimal if you desire your model to maintain a more comprehensive long-term memory. + +To address this limitation, we recommend exploring the subsequent tutorial, [AutoGPT Tutorial](./autogpt_tutorial.md). This tutorial introduces a fundamental implementation of AutoGPT, enhancing the ReAct flow by incorporating a Memory Flow. This addition tackles the challenge of managing longer conversations. + +___ + + +**Next Tutorial:** [AutoGPT Tutorial](./autogpt_tutorial.md) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/setting_up_aiFlows.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/setting_up_aiFlows.md.txt new file mode 100644 index 0000000..79ca6c2 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/setting_up_aiFlows.md.txt @@ -0,0 +1,96 @@ +# Setting up aiFlows +Welcome to a straightforward tutorial in which we walk you through a suggested setup that will provide you with a smooth and efficient workflow. + + +Let's dive right in. This document is a tutorial for setting up the following: + +1. [Section 1:](#section-1-installing-aiflows) Installing aiFlows +2. [Section 2:](#section-2-setting-up-the-flowverse) Setting Up The FlowVerse +3. [Section 3:](#section-3-setting-up-your-api-keys) Setting Up Your API Keys + + +### By the Tutorial's End, I Will Have... 
+ +* Installed the aiFlows library successfully +* Established an organized file structure for seamless collaboration within the FlowVerse +* Set up a Hugging Face account for contribution to the FlowVerse (Optional) +* Configured and activated my API keys + +## Section 1: Installing aiFlows +Begin the installation process for aiFlows with Python 3.10+ using: +```bash +pip install aiflows +``` +Alternatively, for a manual installation: + +```bash +git clone https://github.com/epfl-dlab/aiflows.git +cd aiflows +conda create --name flows python=3.10 +conda activate flows +pip install -e . +``` + +## Section 2: Setting Up The FlowVerse + +### Step 1: Setting up efficient Folder Structure +Create a dedicated folder for the FlowVerse, following our recommended structure: +```bash +mkdir FlowVerse +``` +Following the download of your initial Flows from the FlowVerse, your folder arrangement should look like this: +```bash +|-- YourProject +|-- flow_modules +| |-- Flow1 +| |-- Flow2 +| |-- ... +``` +This ensures all your Flows are conveniently centralized in a single place, simplifying management. + +### Step 2: Optional - Linking Hugging Face for FlowVerse Push + +To facilitate FlowVerse pushing, it's essential to link your Hugging Face account: +1. Begin by creating a [Hugging Face](https://huggingface.co/join) account and verify your email. +2. Log in to Hugging Face in the terminal using: + * For terminal login, you'll need an access token. If you haven't already, [create one](https://huggingface.co/settings/tokens) (a read token should be sufficient) + * Enter the following command in the terminal, and when prompted, paste your access token: + ``` + huggingface-cli login + ``` + +This process is essential for the smooth integration of Hugging Face with FlowVerse, ensuring effortless pushing. + +## Section 3: Setting Up Your API Keys + +In this final step, let's configure your API keys as environment variables for your conda environment. 
We'll demonstrate how to set up keys for both OpenAI and Azure. Note that, thanks to LiteLLM, a variety of providers are available—explore them here: https://docs.litellm.ai/docs/providers + +* If you're using openAI: + * write in your terminal: + ``` + conda env config vars set OPENAI_API_KEY= + ``` + * reactivate your conda environment: + ``` + conda activate + ``` + * To make sure that your key has been set as an environment variable (your environment variables should appear): + ``` + conda env config vars list + ``` +* If you're using Azure: + * write in your terminal: + ``` + conda env config vars set AZURE_OPENAI_KEY= + conda env config vars set AZURE_API_BASE= + conda env config vars set AZURE_API_VERSION= + ``` + * reactivate your conda environment: + ``` + conda activate + ``` + * To make sure that your key has been set as an environment variable (your environment variables should appear): + ``` + conda env config vars list + ``` + +Congratulations! You are now equipped to seamlessly work with aiFlows. Happy flowing! diff --git a/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/tutorial_landing_page.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/tutorial_landing_page.md.txt new file mode 100644 index 0000000..19aefb9 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/Tutorial/tutorial_landing_page.md.txt @@ -0,0 +1,67 @@ +# Tutorials + +Welcome to the exciting world of aiFlows! 🚀 These tutorials are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, **we recommend following the tutorials in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path. + +Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows. 
+ +Let's dive in and explore the following tutorials ! + +## [1. Setting up aiFlows](./setting_up_aiFlows.md) +#### By the Tutorial's End, I Will Have... +* Installed the aiFlows library successfully +* Established an organized file structure for seamless collaboration within the FlowVerse +* Set up a Hugging Face account for contribution to the FlowVerse (Optional) +* Configured and activated my API keys + +## [2. Atomic Flow Tutorial](./atomic_flow.md) + +#### By the Tutorial's End, I Will Have... + +* Gained insight into the relationship among a Flow, an input interface, and an output interface +* Acquired hands-on experience in creating an `AtomicFlow` with the example of `ReverseNumberAtomic` +* Learned how to run a flow with a `FlowLauncher` + +## [3. Composite Flow Tutorial](./composite_flow.md) +#### By the Tutorial's End, I Will Have... + +* Gained insights into the concept of a Composite Flow +* Acquired the skills to create a `SequentialFlow` through a toy example +* Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure + +## [4. Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) + +#### By the Tutorial's End, I Will Have... + +* Gained an understanding of the FlowVerse and its significance +* Acquired the skills to retrieve flows from the FlowVerse +* Successfully developed my initial flow by incorporating a FlowVerse flow +* Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API +* Familiarized myself with the fundamental parameters of the `ChatAtomicFlow` + +## [5. ReAct Tutorial](./reAct.md) + +#### By the Tutorial's End, I Will Have... 
+ +* Gained an understanding of the ReAct flow and its significance +* Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies +* Successfully developed my first personalized ReAct flow +* Familiarized myself with the essential parameters of the `ControllerExecutorFlow` + +## [6. ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md) + +#### By the Tutorial's End, I Will Have... + +* Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences +* Learned how to integrate a human feedback flow into ReAct +* Incorporated customized functions into the input and output interfaces. +* Grasped the limitations of ReAct, particularly its lack of long-term memory +* Deepened my understanding of the key parameters in the `ControllerExecutorFlow` configuration + + +## [7. AutoGPT Tutorial](./autogpt_tutorial.md) + +#### By the Tutorial's End, I Will Have... + +* Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications +* Gained proficiency in executing the AutoGPTFlow +* Enhanced comprehension of intricate flow structures \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/autogpt.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/autogpt.md.txt new file mode 100644 index 0000000..29fbc24 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/autogpt.md.txt @@ -0,0 +1,312 @@ +# AutoGPT + +## Definition + +The `AutoGPT` flow is a circular flow that organizes the problem-solving process into four distinct subflows: + +1. `ControllerFlow`: Given a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the `ExecutorFlow`, the next action it should execute to get closer to accomplishing its goal. 
In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow` + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `AutoGPTFlow` if the user expresses such a preference. + +4. `MemoryFlow`: This flow is used to read and write memories of past conversations to and from a database. These memories can be passed to the `ControllerFlow`, enabling it to have a long term memory without having to transmit the entire message history to the language model (LLM). It's implemented with the `VectorStoreFlow`. + +## Topology + +The sequence of execution for `AutoGPT`'s flows is circular and follows this specific order: + +1. The `MemoryFlow` retrieves relevant information from memory +2. The `ControllerFlow` selects the next action to execute and prepares the input for the `ExecutorFlow` +3. The `ExecutorFlow` executes the action specified by the `ControllerFlow` +4. The `HumanFeedbackFlow` asks the user for feedback +5. 
The `MemoryFlow` writes relevant information to memory + +Here's a broad overview of the `AutoGPTFlow`: + +``` +| -------> Memory Flow -------> Controller Flow ------->| +^ | +| | +| v +| <----- HumanFeedback Flow <------- Executor Flow <----| +``` + + + +## Subflows + +### Memory Flow + +We utilize the `ChromaDBFlow` from the [VectorStoreFlowModule](https://huggingface.co/aiflows/VectorStoreFlowModule) as the `MemoryFlow`. For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VectorStoreFlowModule) for an extensive description of its parameters. + +Like every flow, `ChromaDBFlow` defines a `run` function, which is called when the flow is executed: + +```python +def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. It runs the ChromaDBFlow. It either writes or reads memories from the database. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The output data of the flow. + :rtype: Dict[str, Any] + """ + api_information = self.backend.get_key() + + if api_information.backend_used == "openai": + embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key) + else: + # ToDo: Add support for Azure + embeddings = OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY")) + response = {} + + operation = input_data["operation"] + if operation not in ["write", "read"]: + raise ValueError(f"Operation '{operation}' not supported") + + content = input_data["content"] + if operation == "read": + if not isinstance(content, str): + raise ValueError(f"content(query) must be a string during read, got {type(content)}: {content}") + if content == "": + response["retrieved"] = [[""]] + return response + query = content + query_result = self.collection.query( + query_embeddings=embeddings.embed_query(query), + n_results=self.flow_config["n_results"] + ) + + response["retrieved"] = [doc for doc in query_result["documents"]] + + elif operation == "write": + if 
content != "": + if not isinstance(content, list): + content = [content] + documents = content + self.collection.add( + ids=[str(uuid.uuid4()) for _ in range(len(documents))], + embeddings=embeddings.embed_documents(documents), + documents=documents + ) + response["retrieved"] = "" + + return response +``` +One can notice that `ChromaDBFlow` acts as an encapsulation for chromadb's vector store-backend memory, which offers support for two types of operations: + +- `read`: This operation involves retrieving the `n_results` most relevant documents from ChromaDB based on the provided `content`. +- `write`: This operation is utilised to add the given `content` to VectorDB. + +#### Additional Documentation: + +* To delve into the extensive documentation for `ChromaDBFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/VectorStoreFlowModule) +* Find `ChromaDBFlow`'s default [configuration here](https://huggingface.co/aiflows/VectorStoreFlowModule/blob/main/ChromaDBFlow.yaml) +* For more information on the `chromadb` library, explore its [documentation](https://docs.trychroma.com/) + + + + +### ControllerFlow + +We utilize the `ControllerAtomicFlow` from the [ControllerExecutorFlowModule ](https://huggingface.co/aiflows/ControllerExecutorFlowModule) as the `ControllerFlow`. For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/ControllerExecutorFlowModule) for an extensive description of its parameters. + +`ControllerAtomicFlow`'s `run` function looks like this: + +```python +def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. Note that the response of the LLM is in the JSON format, but it's not a hard constraint (it can hallucinate and return an invalid JSON) + + :param input_data: The input data of the flow. 
+ :type input_data: Dict[str, Any] + :return: The output data of the flow (thought, reasoning, criticism, command, command_args) + :rtype: Dict[str, Any] + """ + api_output = super().run(input_data)["api_output"].strip() + response = json.loads(api_output) + return response +``` + +The `run` function is a straightforward wrapper around [ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule). The Language Model (LLM) responds in JSON format, but this isn't strictly enforced—it may occasionally return an invalid JSON. The soft constraint is set by the system prompt, detailed in [its default configuration](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerAtomicFlow.yaml). This configuration specifies the expected output format and describes the available commands it has access to (these are the subflows of the `ExecutorFlow`). The system prompt template is as follows: + +```yaml +system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are a smart AI assistant. + + Your decisions must always be made independently without seeking user assistance. + Play to your strengths as an LLM and pursue simple strategies with no legal complications. + If you have completed all your tasks, make sure to use the "finish" command. + + Constraints: + 1. No user assistance + 2. Exclusively use the commands listed in double quotes e.g. "command name" + + Available commands: + {{commands}} + + Resources: + 1. Internet access for searches and information gathering. + 2. Long Term memory management. + + Performance Evaluation: + 1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities. + 2. Constructively self-criticize your big-picture behavior constantly. + 3. Reflect on past decisions and strategies to refine your approach. + 4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps. 
+ You should only respond in JSON format as described below + Response Format: + { + "thought": "thought", + "reasoning": "reasoning", + "plan": "- short bulleted\n- list that conveys\n- long-term plan", + "criticism": "constructive self-criticism", + "speak": "thoughts summary to say to user", + "command": "command name", + "command_args": { + "arg name": "value" + } + } + Ensure your responses can be parsed by Python json.loads +input_variables: ["commands"] +``` +Where "{{commands}}" is the placeholder for the available commands which are added to the template when the `ControllerAtomicFlow` is being instantiated. + +The goal and observations (from past executions) are passed via the `human_message_prompt` and the `init_human_message_prompt` who are the following: +```yaml +human_message_prompt_template: + template: |2 + Potentially relevant information retrieved from your memory: + {{memory}} + ================= + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + - "memory" +input_interface_initialized: + - "observation" + - "human_feedback" + - "memory" +``` + +#### Additional Documentation: + +* To delve into the extensive documentation for `ControllerAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/ControllerExecutorFlowModule) +* Find `ControllerAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerAtomicFlow.yaml) + + +### ExecutorFlow + +We utilize a [BranchingFlow](https://github.com/epfl-dlab/aiflows/blob/main/aiflows/base_flows/branching.py) from aiFlow's codebase as the `ExecutorFlow`. The `ExecutorFlow` by default has two subflows which are the available commands the `ControllerFlow` can call: + +#### 1. The LCToolFlow + +The `LCToolFlow` is an atomic flow functioning as an interface for LangChain tools. 
This flow operates by taking a `tool_input`, which corresponds to the tool's keyword arguments, as its input, and then provides the observation as its output. + +```python + def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. It runs the backend on the input data. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The output data of the flow. + :rtype: Dict[str, Any] + """ + observation = self.backend.run(tool_input=input_data) + + return {"observation": observation} +``` + +Using a tool with the `LCToolFlow` is a straightforward process. By setting the desired tool as the backend's `_target_`, you can seamlessly integrate it into your workflow. For a comprehensive list of compatible tools, please refer to the Integrations section in [LangChain's Tool documentation](https://python.langchain.com/docs/modules/agents/tools/). + +```yaml +- _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + overrides: + name: "ddg_search" + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` + +#### 2. The WikiSearchAtomicFlow + +The `WikiSearchAtomicFlow` is also atomic flow and functions as an interface for Wikipedia's API. Given a `search_term`, it can execute a search on wikipedia and fetch page summaries to eventually pass it back to the `ControllerFlow` +```python +def run(self, + input_data: Dict[str, Any]) -> Dict[str, Any]: + """ Runs the WikiSearch Atomic Flow. It's used to execute a Wikipedia search and get page summaries. 
+ + :param input_data: The input data dictionary + :type input_data: Dict[str, Any] + :return: The output data dictionary + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + term = input_data.get("search_term", None) + api_wrapper = WikipediaAPIWrapper( + lang=self.flow_config["lang"], + top_k_results=self.flow_config["top_k_results"], + doc_content_chars_max=self.flow_config["doc_content_chars_max"] + ) + + # ~~~ Call ~~~ + if page_content := api_wrapper._fetch_page(term): + search_response = {"wiki_content": page_content, "relevant_pages": None} + else: + page_titles = api_wrapper.search_page_titles(term) + search_response = {"wiki_content": None, "relevant_pages": f"Could not find [{term}]. similar: {page_titles}"} + + # Log the update to the flow messages list + observation = search_response["wiki_content"] if search_response["wiki_content"] else search_response["relevant_pages"] + return {"wiki_content": observation} +``` + +#### Additional Documentation: + +* Refer to [LCToolFlow's FlowCard](https://huggingface.co/aiflows/LCToolFlowModule) and [WikiSearchAtomicFlow's FlowCard](https://huggingface.co/aiflows/ControllerExecutorFlowModule) for further documentation + + +### Human Feedback Flow + +We utilize the `HumanStandadInputFlow` from the [HumanStandardInputFlowModule ](https://huggingface.co/aiflows/HumanStandardInputFlowModule) as the `HumanFeedbackFlow`. For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/HumanStandardInputFlowModule) for an extensive description of its parameters. + +Its `run` function enables users to provide feedback at the conclusion of each iteration. This feedback is subsequently appended to the observation generated by the `ExecutorFlow`. By doing so, the feedback becomes part of the memory, thereby influencing the agent's decision-making process. + +```python +def run(self, + input_data: Dict[str, Any]) -> Dict[str, Any]: + """ Runs the HumanStandardInputFlow. 
It's used to read input from the user/human's standard input. + + :param input_data: The input data dictionary + :type input_data: Dict[str, Any] + :return: The output data dictionary + :rtype: Dict[str, Any] + """ + + query_message = self._get_message(self.query_message_prompt_template, input_data) + state_update_message = UpdateMessage_Generic( + created_by=self.flow_config['name'], + updated_flow=self.flow_config["name"], + data={"query_message": query_message}, + ) + self._log_message(state_update_message) + + log.info(query_message) + human_input = self._read_input() + + return {"human_input": human_input} +``` + +In the current context, if the user enters the command `q`, the flow triggers an early exit by setting the early exit key to `True`, which leads to the termination of the Flow. + +#### Additional Documentation: + +* To delve into the extensive documentation for `HumanStandardInputFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/HumanStandardInputFlow) + diff --git a/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/chat_flow.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/chat_flow.md.txt new file mode 100644 index 0000000..8ad87ff --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/chat_flow.md.txt @@ -0,0 +1,95 @@ +# ChatAtomicFlow + +## Definition + +The `ChatAtomicFlow` is a flow that seamlessly interfaces with an LLM through an API, generating textual responses for textual inputs. Powered by the LiteLLM library in the backend, `ChatAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers). For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VectorStoreFlowModule) for an extensive description of its parameters. 
+ +## Methods + + +In this section, we'll explore some o `ChatAtomicFlow`'s methods, specifically those invoked when it is called. + +Just like every flow, `ChatAtomicFlow` is called via the `run` method: + +```python +def run(self,input_data: Dict[str, Any]): + """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The LLM's api output. + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + self._process_input(input_data) + + # ~~~ Call ~~~ + response = self._call() + + #loop is in case there was more than one answer (n>1 in generation parameters) + for answer in response: + self._state_update_add_chat_message( + role=self.flow_config["assistant_name"], + content=answer + ) + response = response if len(response) > 1 or len(response) == 0 else response[0] + return {"api_output": response} +``` + +As you can see in the code snippet here above, `run` processes the input data of the flow via the `_process_input` method. Let's take a closer look at what it does: + + +```python +def _process_input(self, input_data: Dict[str, Any]): + """ This method processes the input of the flow. It adds the human message to the flow's state. If the conversation is not initialized, it also initializes it + (adding the system message and potentially the demonstrations). + + :param input_data: The input data of the flow. 
+ :type input_data: Dict[str, Any] + """ + if self._is_conversation_initialized(): + # Construct the message using the human message prompt template + user_message_content = self._get_message(self.human_message_prompt_template, input_data) + + else: + # Initialize the conversation (add the system message, and potentially the demonstrations) + self._initialize_conversation(input_data) + if getattr(self, "init_human_message_prompt_template", None) is not None: + # Construct the message using the query message prompt template + user_message_content = self._get_message(self.init_human_message_prompt_template, input_data) + else: + user_message_content = self._get_message(self.human_message_prompt_template, input_data) + + self._state_update_add_chat_message(role=self.flow_config["user_name"], + content=user_message_content) +``` +This function prepares the user message prompt for submission to the Language Model (LLM) by inserting the `input_data` into the placeholders of the user prompt template (details of which will be explained later). The choice of user prompt sent to the LLM depends on whether the conversation has been initiated or not (i.e., whether the flow has been called): + +- If the conversation has not been initialized, the message is constructed using the `init_human_message_prompt_template`. In this case, the expected input interface for the flow is specified by `input_interface_non_initialized`. + +- If the conversation has been initialized, the message is constructed using the `human_message_prompt_template`. In this case, the expected input interface for the flow is specified by `input_interface_initialized`. + +This distinction proves useful when different inputs are needed for the initial query compared to subsequent queries to the flow. For example, in ReAct, the first query to the LLM is initiated by a human, such as asking a question. In subsequent queries, the input is derived from the execution of a tool (e.g., a query to Wikipedia). 
In ReAct's implementation, these two scenarios are differentiated by ChatAtomicFlow's `input_interface_non_initialized` and `input_interface_initialized`, which define the input interface for the flow. + +[ChatAtomicFlow's default configuration](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) defines user prompt templates as so: +```yaml +init_human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + +human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: "{{query}}" + input_variables: + - "query" +input_interface_initialized: + - "query" +``` +This signifies that `init_human_message_prompt_template` represents an empty string message, while the rendered message for `human_message_prompt_template` is derived from the previous flow's query. This is achieved by placing the input variable "query" (from `input_dict`) into the `{{query}}` placeholder of the prompt template. + +Finally, the `run` function calls the LLM via the LiteLLM library, saves the message in it's flow state and sends the output to the next flow. + +**Additional Documentation:** + +* To delve into the extensive documentation for `ChatAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/ChatFlowModule) +* Find `ChatAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/detailed_example_landing_page.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/detailed_example_landing_page.md.txt new file mode 100644 index 0000000..50e8422 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/detailed_example_landing_page.md.txt @@ -0,0 +1,42 @@ +# Detailed Examples + +Welcome to the exciting world of aiFlows! 
🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, **we recommend following the guides in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path. + +Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows. + +Let's dive in and explore the following guides ! + +## [1. ChatAtomicFlow](./chat_flow.md) + +#### By the Guide's End, I Will Have... + +* Gained insight into the execution flow of `ChatAtomicFlow` + +* Acquired an understanding of how `ChatAtomicFlow` processes inputs + +* Identified the documentation resource for `ChatAtomicFlow` + + +## [2. VisionAtomicFlow](./vision_flow.md) + +#### By the Guide's End, I Will Have... + +* Gained insight into the execution flow of `VisionAtomicFlow` + +* Acquired an understanding of how `VisionAtomicFlow` processes inputs + +* Recognized the similarities between `VisionAtomicFlow` and `ChatAtomicFlow` + +* Identified the documentation resource for `VisionAtomicFlow` + +## [3. AutoGPTFlow](./autogpt.md) + +Note: This guide is also useful if you're interested in ReAct since the two share lots of similarities + +#### By the Guide's End, I Will Have... 
+ +* Understood the purpose of `AutoGPTFlow` + +* Explored the functionalities of `AutoGPT`'s subflows + +* Identified the documentation resource for `AutoGPTFlow` and its subflows diff --git a/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/vision_flow.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/vision_flow.md.txt new file mode 100644 index 0000000..7b71a96 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/detailed_examples/vision_flow.md.txt @@ -0,0 +1,106 @@ +# Vision Atomic Flow +**Prequisite**: [Chat Atomic Flow](./chat_flow.md) + +## Definition + +The `VisionAtomicFlow` is a flow that seamlessly interfaces with an LLM through an API, . It is a flow that, given a textual input, and a set of images and/or videos, generates a textual output. Powered by the LiteLLM library in the backend, `VisionAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers). For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VisionFlowModule) for an extensive description of its parameters. + +## Methods + +In this section, we'll delve into some of the methods within the `VisionAtomicFlow` class, specifically those invoked when it is called. + +If you examine the [`VisionAtomicFlow` class](https://huggingface.co/aiflows/VisionFlowModule/blob/main/VisionAtomicFlow.py), you'll observe the following: + +1. It's a class that inherits from the `ChatAtomicFlow`. +2. There is no `run` method explicitly defined, and as a result, it shares the same `run` method as `ChatAtomicFlow`, which is the method always called when a flow is invoked. + +Here is the run method of VisionAtomicFlow: +```python +def run(self,input_data: Dict[str, Any]): + """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow. + + :param input_data: The input data of the flow. 
+ :type input_data: Dict[str, Any] + :return: The LLM's api output. + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + self._process_input(input_data) + + # ~~~ Call ~~~ + response = self._call() + + #loop is in case there was more than one answer (n>1 in generation parameters) + for answer in response: + self._state_update_add_chat_message( + role=self.flow_config["assistant_name"], + content=answer + ) + response = response if len(response) > 1 or len(response) == 0 else response[0] + return {"api_output": response} +``` + +In the provided code snippet, observe that the `run` method handles the input data of the flow through the `_process_input` method. Let's delve into a closer examination of its functionality: + + +```python +def _process_input(self, input_data: Dict[str, Any]): + """ This method processes the input data (prepares the messages to send to the API). + + :param input_data: The input data. + :type input_data: Dict[str, Any] + :return: The processed input data. 
+ :rtype: Dict[str, Any] + """ + if self._is_conversation_initialized(): + # Construct the message using the human message prompt template + user_message_content = self.get_user_message(self.human_message_prompt_template, input_data) + + else: + # Initialize the conversation (add the system message, and potentially the demonstrations) + self._initialize_conversation(input_data) + if getattr(self, "init_human_message_prompt_template", None) is not None: + # Construct the message using the query message prompt template + user_message_content = self.get_user_message(self.init_human_message_prompt_template, input_data) + else: + user_message_content = self.get_user_message(self.human_message_prompt_template, input_data) + + self._state_update_add_chat_message(role=self.flow_config["user_name"], + content=user_message_content) +``` + + +When calling `_process_input(input_data)` in `VisionAtomicFlow`, the flow generates its user message prompt similarly to `ChatAtomicFlow` (refer to [ChatAtomicFlow's detailed example](./chat_flow.md)). However, due to a slight modification in the `get_user_message` method compared to `ChatAtomicFlow`, it also includes one or multiple images or videos in the input. + +```python + @staticmethod + def get_user_message(prompt_template, input_data: Dict[str, Any]): + """ This method constructs the user message to be passed to the API. + + :param prompt_template: The prompt template to use. + :type prompt_template: PromptTemplate + :param input_data: The input data. + :type input_data: Dict[str, Any] + :return: The constructed user message (images , videos and text). 
+ :rtype: Dict[str, Any] + """ + content = VisionAtomicFlow._get_message(prompt_template=prompt_template,input_data=input_data) + media_data = input_data["data"] + if "video" in media_data: + content = [ content[0], *VisionAtomicFlow.get_video(media_data["video"])] + if "images" in media_data: + images = [VisionAtomicFlow.get_image(image) for image in media_data["images"]] + content.extend(images) + return content +``` + +Note that images can be passed either via a URL (an image on the internet) or by providing the path to a local image. However, videos must be local videos. + + +Finally, the `run` function calls the LLM via the LiteLLM library, saves the message in it's flow state and sends the textual output to the next flow. + +**Additional Documentation:** + +* To delve into the extensive documentation for `VisionAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/VisionFlowModule) +* Find `ChatAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/VisionFlowModule/blob/main/demo.yaml) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/developper_guide_landing_page.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/developper_guide_landing_page.md.txt new file mode 100644 index 0000000..bcc5a71 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/developper_guide_landing_page.md.txt @@ -0,0 +1,27 @@ +# Developer's Guide + +Welcome to the exciting world of aiFlows! 🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, **we recommend following the tutorials in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path. 
+ +Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows. + +Let's dive in and explore the following guides ! + +## [1. Flow Module Management](./flow_module_management.md) + +#### By the Tutorial's End, I Will Have... + +* Gained a clear understanding of pulling flows from the FlowVerse. + +* Mastered the handling of flows that depend on other flows. + +## [2. Typical Developer Workflows](./typical_developer_workflows.md) + +#### By the Tutorial's End, I Will Have... + +* Learned how to Create a Flow + +* Learned how to Test a Flow + +* Learned how to Publish a Flow + +* Learned how to contributing to an existing flow \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/flow_module_management.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/flow_module_management.md.txt new file mode 100644 index 0000000..6946486 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/flow_module_management.md.txt @@ -0,0 +1,81 @@ +# Flow Module Management + +### By the Tutorial's End, I Will Have... + +* Gained a clear understanding of pulling flows from the FlowVerse. + +* Mastered the handling of flows that depend on other flows. + +## Introduction + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, these Flows can be readily downloaded, used, extended or composed into novel, more complex Flows. For the ones using ChatGPT, you could think of them as open-source GPTs(++). + +In the heart of this platform, the community shares their unique Flows, encapsulated in what we call **flow modules**. + +## Flow Modules + +- Each Hugging Face published repository corresponds to a self-contained flow module. 
For instance, [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule) is a flow module. +- A module may include multiple Flow classes and potentially a default configuration YAML file. In the [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule) module, you can find [ChatHumanFlowModule.py](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule/blob/main/ChatHumanFlowModule.py). +- Each Flow class can depend on other remote, publicly available modules. For example, [ChatHumanFlowModule.py](https://huggingface.co/aiflows/ChatInteractiveFlowModule/blob/main/ChatHumanFlowModule.py) depends on [aiflows/ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule). + +## Syncing Flow Modules + +To use or import a flow module, first sync it to the `flow_modules` directory in your root directory. You can then import it like any local Python package. Consider the following `trivial_sync_demo.py`, which relies on [nbaldwin/ChatFlows](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule): + +```python +dependencies = [ + {"url": "nbaldwin/ChatInteractiveFlowModule", "revision": "main"}, +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.nbaldwin.ChatInteractiveFlowModule import ChatHumanFlowModule + +if __name__ == "__main__": + print("This is a trivial sync demo.") +``` + +This synchronization process, though it may seem unconventional at first, provides a number of advantages: +* The synchronization process examines the implementation of remote flow modules seamlessly, eliminating the need to switch between your integrated development * environment (IDE) and a web page. +* It extends existing implementations effortlessly without the requirement to download or clone the repository manually. 
+ +## Flow Module Namespace + +* Remote flow modules are identified by their Hugging Face repository ID and revision, such as `nbaldwin/ChatInteractiveFlowModule:main`. +* Each locally synchronized flow module manifests as a valid Python package within the `flow_modules` directory, exemplified by structures like `flow_modules.nbaldwin.ChatInteractiveFlowModule`. Importantly, only one revision is retained for each remote flow module, a practice upheld to ensure clarity and manage revision conflicts. Should a conflict arise, a warning will guide you to select the preferred version. + +For a visual representation, consider the following directory structure: + +```shell +(aiflows) ➜ dev-tutorial tree . +. +├── flow_modules +│ ├── aiflows +│ │ └── ChatFlowModule +│ │ ├── ... +│ │ ├── ChatAtomicFlow.py +│ │ ├── ChatAtomicFlow.yaml +│ │ ├── ... +│ │ ├── ... +│ │ └── __pycache__ +│ │ ├── ChatAtomicFlow.cpython-39.pyc +│ │ └── __init__.cpython-39.pyc +│ └── nbaldwin +│ └── ChatInteractiveFlowModule +│ ├── ... +│ ├── ChatHumanFlowModule.py +│ ├── ChatHumanFlowModule.yaml +│ ├── README.md +│ ├── ... +│ └── __pycache__ +│ ├── ChatHumanFlowModule.cpython-39.pyc +│ └── __init__.cpython-39.pyc +└── trivial_sync_demo.py + +9 directories, 16 files +``` +In this illustration, the `nbaldwin/ChatInteractiveFlowModule` flow module relies on the remote flow module `aiflows/ChatAtomicFlow`. Both dependencies are seamlessly synchronized under the flow_modules directory. The synchronization and importation of dependencies mirror each other, ensuring a consistent and logical approach across remote and local development environments. 
+ +____ + +**Next Tutorial**: [Typical Developer Workflows](./typical_developer_workflows.md) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/typical_developer_workflows.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/typical_developer_workflows.md.txt new file mode 100644 index 0000000..ca0f41e --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/developer_guide/typical_developer_workflows.md.txt @@ -0,0 +1,252 @@ +# Typical Developer Workflows +**prerequisites**: [Flow Module Management](./flow_module_management.md) + +## Creating, Testing, and Publishing Your Own Flow Module + +### By the Tutorial's End, I Will Have... + +* Learned how to Create a Flow + +* Learned how to Test a Flow + +* Learned how to Publish a Flow + +* Learned how to contributing to an existing flow + + +### Creating Your Own Flow Module + +To start, create a local directory where you'll develop your flow module: + +```shell +(aiflows) ➜ dev-tutorial mkdir PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots touch __init__.py +(aiflows) ➜ dev_UsefulChatBots touch .gitignore +(aiflows) ➜ dev_UsefulChatBots touch EconomicExpertBot.py +(aiflows) ➜ dev_UsefulChatBots git init +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git add . +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git commit -m "initial commit" +[main (root-commit) e592fd1] initial commit +3 files changed, 0 insertions(+), 0 deletions(-) +create mode 100644 .gitignore +create mode 100644 EconomicExpertBot.py +create mode 100644 __init__.py +``` + +Next, we could either develop from scratch as in [Tutorial for AtomicFlow](../Tutorial/atomic_flow.md) or we could leverage an existing flow module and build upon it. 
In this tutorial, we'll develop our chatbot based on [aiflows/ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) thanks to the modularity of Flows: + +```python +dependencies = [ + {"url": "aiflows/ChatFlowModule", "revision": "main"}, +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow + +class EconomicExpertBot(ChatAtomicFlow): + def __init__(self, **kwargs): + super().__init__(**kwargs) +``` + +We recommend to associate your flow with a default yaml file as the default config. This default config will serve as a clear spec of the Flow class. For example, in our case: +```yaml +name: "EconomicExpertBot" +description: "A chatbot which answers questions about the economy." + +input_interface: + - "query" + +output_interface: + - "response" + +system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are an expertise in finance, economy and investment. When you explain something, you always provide associated statistical numbers, source of the information and concrete examples. You tend to explain things in a step-by-step fashion to help the reader to understand. You are also proficient in both English and Chinese. You can answer questions fluently in both languages. + + input_variables: [] +``` + +This explicitly informs potential users about the `input_interface` and `output_interface`, which can be seen as the interface of our Flow. Since we're inheriting from `aiflows/ChatFlowModule.ChatAtomicFlow`, we also inherit the [default config](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) from it. Therefore, our default config can be succinct and only needs to tweak some essential parameters. + +Note that a flow module should ideally be a self-contained python module. Therefore, it's best to use relative import inside your code such that other users can use your flow instantly. 
+
+
### Testing Your Own Flow Module

So far so good, we have created our own flow. Let's now try to test it:

```python
dependencies = [
 {"url": "yeeef/UsefulChatBots", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots"},
]
from aiflows import flow_verse
flow_verse.sync_dependencies(dependencies)

import os

from flow_modules.yeeef.UsefulChatBots.EconomicExpertBot import EconomicExpertBot
from aiflows.flow_launchers import FlowLauncher
from aiflows.backends.api_info import ApiInfo


if __name__ == "__main__":
 # ~~~ Set the API information ~~~
 # OpenAI backend

 api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]

 overrides = { "backend": {"api_infos": api_information}}

 bot = EconomicExpertBot.instantiate_from_default_config(**overrides)
 # the data points in inputs must satisfy the requirements of input_keys
 data = [
 {
 "id": 0, "query": "What is CPI? What is the current CPI in the US?",
 },
 ]
 print(f"inputs: {data}")

 # init a minimal flow_launcher without specifying the output_keys, then
 # the full output_keys will be given
 outputs = FlowLauncher.launch(
 flow_with_interfaces={"flow": bot},
 data=data,
 )
 print(outputs)
```

As we are developing locally, the remote revision does not exist yet, so we point the revision to the local path we just created: `PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots`. Note that when we sync a local revision, instead of copying the files locally, we make a symbolic soft link. So you could just modify the code under `flow_modules` and the changes will be automatically propagated to the `PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots`. + +We also specify the namespace of our flow module: `yeeef/UsefulChatBots`. yeeef is my HuggingFace username, and you should replace it with your own Hugging Face username. 
Note that this `url` could be arbitrary as it does not exist online yet, but we highly recommend that the namespace of the flow module be consistent with your HuggingFace username, such that publishing it later will be seamless. + +Then let’s execute the code and test our new flow: + +``` +(aiflows) ➜ dev-tutorial python ask_economic_expert_bot.py +inputs: [{'id': 0, 'query': 'What is CPI? What is the current CPI in the US?'}] +[2023-07-05 17:05:35,530][aiflows.base_flows.abstract][WARNING] - The raw response was not logged. +[{'id': 0, 'inference_outputs': [OutputMessage(message_id='d95683d6-9507-4a90-b290-6a43e609c904', created_at='2023-07-05 09:05:35.530972000', created_by='EconomicExpertBot', message_type='OutputMessage', data={'output_keys': ['response'], 'output_data': {'response': 'CPI, or the Consumer Price Index, is a measure that examines the weighted average of prices of a basket of consumer goods and services, such as transportation, food, and medical care. It is calculated by taking price changes for each item in the predetermined basket of goods and averaging them. Changes in the CPI are used to assess price changes associated with the cost of living.'}, 'missing_output_keys': []}, private_keys=['api_keys'])], 'error': None}] +``` + +Looks good! Now let’s publish it to the huggingface! + +### Publishing Your Flow Module + +Start by creating a new model on Hugging Face, aligning it with the namespace used during testing: `yeeef/UsefulChatBots`. Click the `Create model` button to create the model. 
+ +![](https://hackmd.io/_uploads/r1iB4pGFn.png) + +Then, you can either upload the files manually through the Hugging Face webpage or push your changes to the remote: + +```shell +(aiflows) ➜ dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git remote add origin https://huggingface.co/yeeef/UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git pull -r origin main +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git push --set-upstream origin main +``` + +Congratulations! You now have your remote module online, available for everyone to use! + + +![](https://hackmd.io/_uploads/HJ4LNafF3.png) + +## Contributing to an Existing Flow + +In this tutorial, we continue to use the `trivial_sync_demo.py` (see [Flow Module Management](./flow_module_management.md)) script. As the dependencies are synced to your root directory, you can instantly modify the synced flow module according to your needs. Once you've made enough changes and feel ready to make a Pull Request (PR), you simply need to push your changes to the Hugging Face repository and create the PR. 
+ +For instance, let's say we want to update the dependency of [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule/tree/main) to the latest version of [aiflows/ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule): + +```python +dependencies = [ + {"url": "aiflows/ChatFlowModule", "revision": "main"} # cae3fdf2f0ef7f28127cf4bc35ce985c5fc4d19a -> main +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow + +class ChatHumanFlowModule(ChatAtomicFlow): + def __init__(self, **kwargs): + + ##SOME CODE +``` + +Firstly, navigate to the synced folder, initialize a git repository, and commit your changes: + +``` +(aiflows) ➜ dev-tutorial cd flow_modules/nbaldwin/ChatInteractiveFlowModule +(aiflows) ➜ ChatInteractiveFlowModule git init +Initialized empty Git repository in /Users/yeeef/Desktop/dlab-ra/dev-tutorial/flow_modules/nbaldwin/ChatInteractiveFlowModule/.git/ +(aiflows) ➜ ChatInteractiveFlowModule git:(main) ✗ git add . +(aiflows) ➜ ChatInteractiveFlowModule git:(main) ✗ git commit -m "Change the dependency revision to main" +[main d7465df] Change the dependency revision to main + 1 file changed, 1 insertion(+), 1 deletion(-) +``` + +Next, you need to open a PR on the target Hugging Face repository. Navigate to `Community` and click on `New pull request`. + +![](https://hackmd.io/_uploads/ry0f4pfF2.png) + + +Enter a brief description for your PR branch and click on `Create PR branch`. 
+ +![](https://hackmd.io/_uploads/S1aQV6fK3.png) + + +Once your PR branch has been created (for instance, `pr/2`), you'll need to push your changes to this branch: + +``` +(aiflows) ➜ ChatInteractiveFlowModule git:(main) git checkout -b pr/2 +Switched to a new branch 'pr/2' +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git remote add origin https://huggingface.co/nbaldwin/ChatInteractiveFlowModule +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git pull -r origin pr/2 +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git push origin pr/2:pr/2 +Enumerating objects: 11, done. +Counting objects: 100% (11/11), done. +Delta compression using up to 10 threads +Compressing objects: 100% (8/8), done. +Writing objects: 100% (8/8), 952 bytes | 952.00 KiB/s, done. +Total 8 (delta 5), reused 0 (delta 0), pack-reused + + 0 +To https://huggingface.co/nbaldwin/ChatInteractiveFlowModule + 1849a87..1818057 pr/2 -> refs/pr/2 +``` + +Finally, review your PR changes on the Hugging Face PR page and click the `Publish` button to finalize your submission. + +![](https://hackmd.io/_uploads/rkvVV6MFn.png) + +## Develop Over an Existing Flow and Publish it Under Your Namespace + +As a Flow developer, you can easily develop based on any synced flow modules. However, instead of making a PR to the original repository, you may wish to publish it under your own namespace. This can be the case if you've made substantial changes that the original author might not prefer. + +Let’s get back to our `trivial_sync_demo`, where we leverage `nbaldwin/ChatInteractiveFlowModule`. We have made some changes to it and want to publish it on our own as `yeeef/MyChatInteractiveFlowModule`. 
To do this, we recommend following steps: + +**Step 1**: Manually copy the modified flow module out of the `flow_modules` directory: + +```shell +(aiflows) ➜ dev-tutorial cp -r ./flow_modules/nbaldwin/ChatInteractiveFlowModules PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules +``` + +**Step 2**: Next, we can treat it as a local file directory and sync it with a local revision: + +```python +dependencies = [ + {"url": "nbaldwin/ChatInteractiveFlowModules", "revision": "main"}, + {"url": "yeeef/MyChatInteractiveFlowModule", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules"}, + +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.nbaldwin.ChatInteractiveFlowModules import ChatHumanFlowModule +from flow_modules.yeeef.MyChatInteractiveFlowModules import MyChatInteractiveFlowModules + +if __name__ == "__main__": + print("it is a trivial sync demo") +``` + +**Step 3**: Finally, follow the procedure outlined in [this](#creating-your-own-flow-module) section, and you are good to go! diff --git a/docs/built_with_sphinx/html/_sources/getting_started/index.md.txt b/docs/built_with_sphinx/html/_sources/getting_started/index.md.txt new file mode 100644 index 0000000..f9e68f7 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/getting_started/index.md.txt @@ -0,0 +1,23 @@ +## Getting Started + +### [Quick start (🕓 5 min)](./Quick_Start/quick_start.md) + +Here, you'll see how you can run inference with your first question-answering Flow, and you can trivially change between vastly different question-answering Flows thanks to the modular abstraction and FlowVerse! + +### [Tutorial (🕓 20 min)](./Tutorial/tutorial_landing_page.md) + +In this tutorial, we introduce you to the library's features through a walkthrough of how to build useful Flows of gradually increasing complexity. 
Starting from a vanilla QA Flow, we'll first extend it to a ReAct Flow, then ReAct with human feedback, and finish the tutorial with a version of AutoGPT! + +### [Developer's Guide (🕓 10 min)](./developer_guide/developper_guide_landing_page.md) + +We are constantly optimizing our Flow development workflow (pun intended:). In this short guide, we share our best tips so that you don't have to learn the hard way. + +### [Detailed Examples](./detailed_examples/detailed_example_landing_page.md) +Many of the recently proposed prompting and collaboration strategies involving tools, humans, and AI models are, in essence, specific Flows (see the figure below). In the link above, you'll find a detailed walkthrough of how to build some representative workflows. + + + +![The Flows framework exemplified.](/media/previous_flows_rounded.png) +**The Flows framework exemplified.** The first column depicts examples of tools. Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools, constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between Atomic or Composite Flows. The fourth column illustrates a specific Composite competitive coding Flow as those used in the experiments in the [paper](https://arxiv.org/abs/2308.01285). The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + + diff --git a/docs/built_with_sphinx/html/_sources/index.rst.txt b/docs/built_with_sphinx/html/_sources/index.rst.txt new file mode 100644 index 0000000..49eb8ec --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/index.rst.txt @@ -0,0 +1,74 @@ +Introduction +============= + +.. 
toctree:: + :maxdepth: 4 + :caption: Table of Contents + :hidden: + + introduction/index + installation/index + getting_started/index + contributing_info/contribute_index + citation/index + source/modules + +.. figure:: media/logo_text_statement_alt_rounded_corners.png + :align: center + :alt: image + :width: 600px + +🤖🌊 **aiFlows** embodies the `Flows`_ (`arXiv`_) abstraction and greatly simplifies the design and +implementation of complex (work)Flows involving humans, AI systems, and tools. It enables: + +- 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface +- 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows +- 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future + + +.. _Flows: https://github.com/epfl-dlab/aiflows/assets/flows_paper.pdf +.. _arXiv: https://arxiv.org/abs/2308.01285 + +Flows in a Nutshell +--------------------- + +The framework is centered around *Flows* and *messages*. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process. + +.. figure:: media/fig1_rounded_corners.png + :align: center + :alt: image + :width: 1000px + + The *Flows* framework exemplified. **The first column depicts examples of tools.** Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between *Atomic* or *Composite* Flows. 
The fourth column illustrates a specific *Composite* competitive coding Flow as those used in the experiments in the paper. The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + +FlowVerse in a Nutshell +---------------------------- + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file. As an example, `here `_ is the AutoGPT Flow on FlowVerse. For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++). + +The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord `channel `_. Additionally, the *Tutorials* and *Detailed Examples* in the `Getting Started `_ sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.). + +Why should I use aiFlows? +---------------------------- + +AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools. + +As a researcher, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design, implement, and study arbitrarily complex interactions +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). 
+- The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the `cc_flows` repository could be a useful starting point in future studies). +- The ability to readily make your work accessible to practitioners and other researchers and access their feedback. + +As a practitioner, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design and implement arbitrarily complex interactions. +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). +- The ability to readily reuse or build on top of Flows shared on the FlowVerse. +- Direct access to any advancements in the field. + +To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the `contribute <../contributing_info/index.rst>`_ section for more information. diff --git a/docs/built_with_sphinx/html/_sources/installation/index.rst.txt b/docs/built_with_sphinx/html/_sources/installation/index.rst.txt new file mode 100644 index 0000000..c21e5ff --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/installation/index.rst.txt @@ -0,0 +1,19 @@ +Installation +=================== + +The library requires Python 3.10+. To install the library, run the following command: + +.. code-block:: shell + + pip install aiflows + +Other Installation Options +-------------------------- + +Install bleeding-edge version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: shell + + git clone git@github.com:epfl-dlab/aiflows.git + cd aiflows + pip install -e . 
diff --git a/docs/built_with_sphinx/html/_sources/introduction/index.rst.txt b/docs/built_with_sphinx/html/_sources/introduction/index.rst.txt new file mode 100644 index 0000000..f64d1aa --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/introduction/index.rst.txt @@ -0,0 +1,60 @@ +Introduction +============= + +.. figure:: ../media/logo_text_statement_alt_rounded_corners.png + :align: center + :alt: image + :width: 600px + +🤖🌊 **aiFlows** embodies the `Flows`_ (`arXiv`_) abstraction and greatly simplifies the design and implementation of complex (work)Flows involving humans, AI systems, and tools. It enables: + +- 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface +- 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows +- 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future + +.. _Flows: https://github.com/epfl-dlab/aiflows/assets/flows_paper.pdf +.. _arXiv: https://arxiv.org/abs/2308.01285 + +Flows in a Nutshell +--------------------- + +The framework is centered around *Flows* and *messages*. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process. + +.. figure:: ../media/fig1_rounded_corners.png + :align: center + :alt: image + :width: 1000px + + The *Flows* framework exemplified. **The first column depicts examples of tools.** Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. 
The third column depicts examples of Composite Flows defining structured interaction between *Atomic* or *Composite* Flows. The fourth column illustrates a specific *Composite* competitive coding Flow as those used in the experiments in the paper. The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + +FlowVerse in a Nutshell +---------------------------- + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file (e.g., `here `_ is the AutoGPT Flow on FlowVerse). For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++). + +The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord `channel `_. Additionally, the *Tutorials* and *Detailed Examples* in the `Getting Started `_ sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.). + +Why should I use aiFlows? +---------------------------- + +AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools. 
+ +As a researcher, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design, implement, and study arbitrarily complex interactions +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). +- The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the `cc_flows` repository could be a useful starting point in future studies). +- The ability to readily make your work accessible to practitioners and other researchers and access their feedback. + +As a practitioner, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design and implement arbitrarily complex interactions. +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). +- The ability to readily reuse or build on top of Flows shared on the FlowVerse. +- Direct access to any advancements in the field. + +To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the `contribute <../contributing_info/index.rst>`_ section for more information. diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.backends.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.backends.rst.txt new file mode 100644 index 0000000..aeb42aa --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.backends.rst.txt @@ -0,0 +1,29 @@ +aiflows.backends package +======================== + +Submodules +---------- + +aiflows.backends.api\_info module +--------------------------------- + +.. 
automodule:: aiflows.backends.api_info + :members: + :undoc-members: + :show-inheritance: + +aiflows.backends.llm\_lite module +--------------------------------- + +.. automodule:: aiflows.backends.llm_lite + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.backends + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.base_flows.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.base_flows.rst.txt new file mode 100644 index 0000000..2fb8b8c --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.base_flows.rst.txt @@ -0,0 +1,61 @@ +aiflows.base\_flows package +=========================== + +Submodules +---------- + +aiflows.base\_flows.abstract module +----------------------------------- + +.. automodule:: aiflows.base_flows.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.atomic module +--------------------------------- + +.. automodule:: aiflows.base_flows.atomic + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.branching module +------------------------------------ + +.. automodule:: aiflows.base_flows.branching + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.circular module +----------------------------------- + +.. automodule:: aiflows.base_flows.circular + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.composite module +------------------------------------ + +.. automodule:: aiflows.base_flows.composite + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.sequential module +------------------------------------- + +.. automodule:: aiflows.base_flows.sequential + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: aiflows.base_flows + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.data_transformations.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.data_transformations.rst.txt new file mode 100644 index 0000000..a9c3906 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.data_transformations.rst.txt @@ -0,0 +1,109 @@ +aiflows.data\_transformations package +===================================== + +Submodules +---------- + +aiflows.data\_transformations.abstract module +--------------------------------------------- + +.. automodule:: aiflows.data_transformations.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.end\_of\_interaction module +--------------------------------------------------------- + +.. automodule:: aiflows.data_transformations.end_of_interaction + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.json module +----------------------------------------- + +.. automodule:: aiflows.data_transformations.json + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_copy module +---------------------------------------------- + +.. automodule:: aiflows.data_transformations.key_copy + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_delete module +------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_delete + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_match\_input module +------------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_match_input + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_rename module +------------------------------------------------ + +.. 
automodule:: aiflows.data_transformations.key_rename + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_select module +------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_select + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_set module +--------------------------------------------- + +.. automodule:: aiflows.data_transformations.key_set + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.print\_previous\_messages module +-------------------------------------------------------------- + +.. automodule:: aiflows.data_transformations.print_previous_messages + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.regex\_extractor\_first module +------------------------------------------------------------ + +.. automodule:: aiflows.data_transformations.regex_extractor_first + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.unnesting\_dict module +---------------------------------------------------- + +.. automodule:: aiflows.data_transformations.unnesting_dict + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.data_transformations + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.datasets.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.datasets.rst.txt new file mode 100644 index 0000000..9639658 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.datasets.rst.txt @@ -0,0 +1,37 @@ +aiflows.datasets package +======================== + +Submodules +---------- + +aiflows.datasets.abstract module +-------------------------------- + +.. 
automodule:: aiflows.datasets.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.datasets.demonstrations\_11 module +------------------------------------------ + +.. automodule:: aiflows.datasets.demonstrations_11 + :members: + :undoc-members: + :show-inheritance: + +aiflows.datasets.outputs module +------------------------------- + +.. automodule:: aiflows.datasets.outputs + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.datasets + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.flow_cache.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_cache.rst.txt new file mode 100644 index 0000000..07e99c9 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_cache.rst.txt @@ -0,0 +1,21 @@ +aiflows.flow\_cache package +=========================== + +Submodules +---------- + +aiflows.flow\_cache.flow\_cache module +-------------------------------------- + +.. automodule:: aiflows.flow_cache.flow_cache + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.flow_cache + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.flow_launchers.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_launchers.rst.txt new file mode 100644 index 0000000..2e924b2 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_launchers.rst.txt @@ -0,0 +1,29 @@ +aiflows.flow\_launchers package +=============================== + +Submodules +---------- + +aiflows.flow\_launchers.abstract module +--------------------------------------- + +.. automodule:: aiflows.flow_launchers.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.flow\_launchers.flow\_API\_launcher module +-------------------------------------------------- + +.. 
automodule:: aiflows.flow_launchers.flow_API_launcher + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.flow_launchers + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.flow_verse.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_verse.rst.txt new file mode 100644 index 0000000..c9c024b --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.flow_verse.rst.txt @@ -0,0 +1,29 @@ +aiflows.flow\_verse package +=========================== + +Submodules +---------- + +aiflows.flow\_verse.loading module +---------------------------------- + +.. automodule:: aiflows.flow_verse.loading + :members: + :undoc-members: + :show-inheritance: + +aiflows.flow\_verse.utils module +-------------------------------- + +.. automodule:: aiflows.flow_verse.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.flow_verse + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.history.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.history.rst.txt new file mode 100644 index 0000000..e47e4a4 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.history.rst.txt @@ -0,0 +1,21 @@ +aiflows.history package +======================= + +Submodules +---------- + +aiflows.history.flow\_history module +------------------------------------ + +.. automodule:: aiflows.history.flow_history + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: aiflows.history + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.interfaces.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.interfaces.rst.txt new file mode 100644 index 0000000..555e56c --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.interfaces.rst.txt @@ -0,0 +1,29 @@ +aiflows.interfaces package +========================== + +Submodules +---------- + +aiflows.interfaces.abstract module +---------------------------------- + +.. automodule:: aiflows.interfaces.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.interfaces.key\_interface module +---------------------------------------- + +.. automodule:: aiflows.interfaces.key_interface + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.interfaces + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.messages.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.messages.rst.txt new file mode 100644 index 0000000..a7b2044 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.messages.rst.txt @@ -0,0 +1,29 @@ +aiflows.messages package +======================== + +Submodules +---------- + +aiflows.messages.abstract module +-------------------------------- + +.. automodule:: aiflows.messages.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.messages.flow\_message module +------------------------------------- + +.. automodule:: aiflows.messages.flow_message + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: aiflows.messages + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.prompt_template.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.prompt_template.rst.txt new file mode 100644 index 0000000..261ef48 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.prompt_template.rst.txt @@ -0,0 +1,21 @@ +aiflows.prompt\_template package +================================ + +Submodules +---------- + +aiflows.prompt\_template.jinja2\_prompts module +----------------------------------------------- + +.. automodule:: aiflows.prompt_template.jinja2_prompts + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.prompt_template + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.rst.txt new file mode 100644 index 0000000..4ef6939 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.rst.txt @@ -0,0 +1,29 @@ +aiflows package +=============== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + aiflows.backends + aiflows.base_flows + aiflows.data_transformations + aiflows.datasets + aiflows.flow_cache + aiflows.flow_launchers + aiflows.flow_verse + aiflows.history + aiflows.interfaces + aiflows.messages + aiflows.prompt_template + aiflows.utils + +Module contents +--------------- + +.. 
automodule:: aiflows + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/aiflows.utils.rst.txt b/docs/built_with_sphinx/html/_sources/source/aiflows.utils.rst.txt new file mode 100644 index 0000000..fd4af16 --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/aiflows.utils.rst.txt @@ -0,0 +1,45 @@ +aiflows.utils package +===================== + +Submodules +---------- + +aiflows.utils.general\_helpers module +------------------------------------- + +.. automodule:: aiflows.utils.general_helpers + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.io\_utils module +------------------------------ + +.. automodule:: aiflows.utils.io_utils + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.logging module +---------------------------- + +.. automodule:: aiflows.utils.logging + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.rich\_utils module +-------------------------------- + +.. automodule:: aiflows.utils.rich_utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/built_with_sphinx/html/_sources/source/modules.rst.txt b/docs/built_with_sphinx/html/_sources/source/modules.rst.txt new file mode 100644 index 0000000..f823ffb --- /dev/null +++ b/docs/built_with_sphinx/html/_sources/source/modules.rst.txt @@ -0,0 +1,7 @@ +aiflows +======= + +.. 
toctree:: + :maxdepth: 4 + + aiflows diff --git a/docs/built_with_sphinx/html/_static/0ff19efc74e94c856af0.woff2 b/docs/built_with_sphinx/html/_static/0ff19efc74e94c856af0.woff2 new file mode 100644 index 0000000..a1a6a20 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/0ff19efc74e94c856af0.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/26400cae88e50682937d.woff b/docs/built_with_sphinx/html/_static/26400cae88e50682937d.woff new file mode 100644 index 0000000..d380900 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/26400cae88e50682937d.woff differ diff --git a/docs/built_with_sphinx/html/_static/2a472f0334546ace60b3.woff2 b/docs/built_with_sphinx/html/_static/2a472f0334546ace60b3.woff2 new file mode 100644 index 0000000..e2e8c0e Binary files /dev/null and b/docs/built_with_sphinx/html/_static/2a472f0334546ace60b3.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/3925889378745d0382d0.woff b/docs/built_with_sphinx/html/_static/3925889378745d0382d0.woff new file mode 100644 index 0000000..d3665f5 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/3925889378745d0382d0.woff differ diff --git a/docs/built_with_sphinx/html/_static/4163112e566ed7697acf.woff2 b/docs/built_with_sphinx/html/_static/4163112e566ed7697acf.woff2 new file mode 100644 index 0000000..df4a627 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/4163112e566ed7697acf.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/5b12b1b913a1d0348fc6.woff b/docs/built_with_sphinx/html/_static/5b12b1b913a1d0348fc6.woff new file mode 100644 index 0000000..90a3993 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/5b12b1b913a1d0348fc6.woff differ diff --git a/docs/built_with_sphinx/html/_static/6c1a3008005254946aef.woff b/docs/built_with_sphinx/html/_static/6c1a3008005254946aef.woff new file mode 100644 index 0000000..751d007 Binary files /dev/null and 
b/docs/built_with_sphinx/html/_static/6c1a3008005254946aef.woff differ diff --git a/docs/built_with_sphinx/html/_static/aef37e2fab43d03531cd.woff2 b/docs/built_with_sphinx/html/_static/aef37e2fab43d03531cd.woff2 new file mode 100644 index 0000000..0fb62e3 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/aef37e2fab43d03531cd.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/awesome-docsearch.css b/docs/built_with_sphinx/html/_static/awesome-docsearch.css new file mode 100644 index 0000000..d68c222 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/awesome-docsearch.css @@ -0,0 +1 @@ +:root{--docsearch-primary-color:hsl(var(--primary));--docsearch-key-gradient:transparent;--docsearch-key-shadow:transparent;--docsearch-text-color:hsl(var(--popover-foreground));--docsearch-modal-width:760px;--docsearch-modal-background:hsl(var(--popover));--docsearch-footer-background:hsl(var(--popover));--docsearch-searchbox-focus-background:hsl(var(--popover));--docsearch-container-background:hsl(var(--background)/0.8);--docsearch-spacing:0.5rem;--docsearch-hit-active-color:hsl(var(--accent-foreground));--docsearch-hit-background:transparent;--docsearch-searchbox-shadow:none;--docsearch-hit-shadow:none;--docsearch-modal-shadow:none;--docsearch-footer-shadow:none}.DocSearch-Button{--tw-ring-offset-color:hsl(var(--background));background-color:transparent;border-color:hsl(var(--input));border-radius:.5em;border-style:solid;border-width:1px;display:flex;font-size:.875rem;line-height:1.25rem;transition-duration:.15s;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);width:90%}.DocSearch-Button:hover{--tw-shadow:0 0 transparent;--tw-shadow-colored:0 0 transparent;box-shadow:0 0 transparent,0 0 transparent,0 0 transparent;box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 
transparent),var(--tw-shadow)}.DocSearch-Button:focus,.DocSearch-Button:hover{background-color:hsl(var(--accent));color:hsl(var(--accent-foreground))}.DocSearch-Button:focus-visible{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color);--tw-ring-color:hsl(var(--ring));--tw-ring-offset-width:2px;box-shadow:var(--tw-ring-inset) 0 0 0 2px var(--tw-ring-offset-color),var(--tw-ring-inset) 0 0 0 4px hsl(var(--ring)),0 0 transparent;box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 transparent);outline:2px solid transparent;outline-offset:2px}.DocSearch-Button-Placeholder{display:block;font-size:.875rem;font-weight:500;line-height:1.25rem}.DocSearch-Button-Key{background-color:hsl(var(--muted));border-color:hsl(var(--border));border-radius:.25rem;border-style:solid;border-width:1px;color:hsl(var(--muted-foreground));font-size:12px}.DocSearch-Container{--tw-backdrop-blur:blur(4px);-webkit-backdrop-filter:blur(4px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:blur(4px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) 
var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);position:fixed}.DocSearch-Modal{border-color:hsl(var(--border));border-radius:var(--radius);border-width:1px}.DocSearch-SearchBar{border-bottom-left-radius:0;border-bottom-right-radius:0;border-bottom-width:1px;border-color:hsl(var(--input));border-top-left-radius:var(--radius);border-top-right-radius:var(--radius);padding:0}.DocSearch-Form{border-bottom-left-radius:0;border-bottom-right-radius:0;border-top-left-radius:var(--radius);border-top-right-radius:var(--radius)}.DocSearch-Cancel{color:hsl(var(--muted-foreground));font-size:.875rem;line-height:1.25rem;padding-left:.5rem;padding-right:.5rem}.DocSearch-MagnifierLabel,.DocSearch-Search-Icon{stroke-width:2;opacity:.5}.DocSearch-Hit-source{color:hsl(var(--muted-foreground))}.DocSearch-Hit,.DocSearch-Hit a{border-radius:calc(var(--radius) - 4px)}.DocSearch-Hit a:focus-visible{outline-offset:-2px}.DocSearch-Hit[aria-selected=true] a{background-color:hsl(var(--accent));color:hsl(var(--accent-foreground))}.DocSearch-Commands{display:none}.DocSearch-Footer{border-color:hsl(var(--border));border-top-width:1px} diff --git a/docs/built_with_sphinx/html/_static/awesome-docsearch.js b/docs/built_with_sphinx/html/_static/awesome-docsearch.js new file mode 100644 index 0000000..e69de29 diff --git a/docs/built_with_sphinx/html/_static/awesome-sphinx-design.css b/docs/built_with_sphinx/html/_static/awesome-sphinx-design.css new file mode 100644 index 0000000..28aa7e1 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/awesome-sphinx-design.css @@ -0,0 +1 @@ 
+:root{--sd-color-tabs-label-active:hsl(var(--foreground));--sd-color-tabs-underline-active:hsl(var(--accent-foreground));--sd-color-tabs-label-hover:hsl(var(--accent-foreground));--sd-color-tabs-overline:hsl(var(--border));--sd-color-tabs-underline:hsl(var(--border))}.sd-card{background-color:hsl(var(--card));border-color:hsl(var(--border));border-radius:var(--radius);border-width:1px;color:hsl(var(--card-foreground));margin-top:1.5rem}.sd-container-fluid{margin-bottom:1.5rem;margin-top:1.5rem}.sd-card-title{font-weight:600!important}.sd-summary-title{color:hsl(var(--muted-foreground));font-weight:500!important}.sd-card-footer,.sd-card-header{font-size:.875rem;line-height:1.25rem}.sd-tab-set{margin-top:1.5rem}.sd-tab-content>p{margin-bottom:1.5rem}.sd-tab-content pre:first-of-type{margin-top:0}.sd-tab-set>label{font-weight:500;letter-spacing:.05em}details.sd-dropdown,details.sd-dropdown:not([open])>.sd-card-header{border-color:hsl(var(--border))}details.sd-dropdown summary:focus{outline-style:solid}.sd-cards-carousel{overflow-x:auto}.sd-shadow-sm{--tw-shadow:0 0 transparent!important;--tw-shadow-colored:0 0 transparent!important;box-shadow:0 0 transparent,0 0 transparent,0 0 transparent!important;box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)!important} diff --git a/docs/built_with_sphinx/html/_static/awesome-sphinx-design.js b/docs/built_with_sphinx/html/_static/awesome-sphinx-design.js new file mode 100644 index 0000000..e69de29 diff --git a/docs/built_with_sphinx/html/_static/b8546ea1646db8ea9c7f.woff b/docs/built_with_sphinx/html/_static/b8546ea1646db8ea9c7f.woff new file mode 100644 index 0000000..90ca64d Binary files /dev/null and b/docs/built_with_sphinx/html/_static/b8546ea1646db8ea9c7f.woff differ diff --git a/docs/built_with_sphinx/html/_static/basic.css b/docs/built_with_sphinx/html/_static/basic.css new file mode 100644 index 0000000..7577acb --- /dev/null +++ 
b/docs/built_with_sphinx/html/_static/basic.css @@ -0,0 +1,903 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + 
+ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: 
auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + 
+/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, 
+figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > 
li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + 
+.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: 
bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_static/c10c163dd1c289f11c49.woff2 b/docs/built_with_sphinx/html/_static/c10c163dd1c289f11c49.woff2 new file mode 100644 index 0000000..baf92ae Binary files /dev/null and b/docs/built_with_sphinx/html/_static/c10c163dd1c289f11c49.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/check-solid.svg b/docs/built_with_sphinx/html/_static/check-solid.svg new file mode 100644 index 0000000..92fad4b --- /dev/null +++ b/docs/built_with_sphinx/html/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs/built_with_sphinx/html/_static/clipboard.min.js b/docs/built_with_sphinx/html/_static/clipboard.min.js new file mode 100644 index 0000000..54b3c46 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! 
+ * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/docs/built_with_sphinx/html/_static/copybutton.css b/docs/built_with_sphinx/html/_static/copybutton.css new file mode 100644 index 0000000..f1916ec --- /dev/null +++ b/docs/built_with_sphinx/html/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *

Short

+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/docs/built_with_sphinx/html/_static/copybutton.js b/docs/built_with_sphinx/html/_static/copybutton.js new file mode 100644 index 0000000..2ea7ff3 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 
'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. 
+var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = 
filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_static/copybutton_funcs.js b/docs/built_with_sphinx/html/_static/copybutton_funcs.js new file mode 100644 index 0000000..dbe1aaa --- /dev/null +++ b/docs/built_with_sphinx/html/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/docs/built_with_sphinx/html/_static/doctools.js b/docs/built_with_sphinx/html/_static/doctools.js new file mode 100644 index 0000000..d06a71d --- /dev/null +++ 
b/docs/built_with_sphinx/html/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + 
event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/docs/built_with_sphinx/html/_static/documentation_options.js b/docs/built_with_sphinx/html/_static/documentation_options.js new file mode 100644 index 0000000..b57ae3b --- /dev/null +++ b/docs/built_with_sphinx/html/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_static/f4604891b5f1fc1bdbe5.woff2 b/docs/built_with_sphinx/html/_static/f4604891b5f1fc1bdbe5.woff2 new file mode 100644 index 0000000..a017aa4 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/f4604891b5f1fc1bdbe5.woff2 differ diff --git a/docs/built_with_sphinx/html/_static/f509ddf49c74ded8c0ee.woff b/docs/built_with_sphinx/html/_static/f509ddf49c74ded8c0ee.woff new file mode 100644 index 0000000..b047231 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/f509ddf49c74ded8c0ee.woff differ diff --git a/docs/built_with_sphinx/html/_static/file.png 
b/docs/built_with_sphinx/html/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/file.png differ diff --git a/docs/built_with_sphinx/html/_static/flows_logo_round.png b/docs/built_with_sphinx/html/_static/flows_logo_round.png new file mode 100644 index 0000000..4d38276 Binary files /dev/null and b/docs/built_with_sphinx/html/_static/flows_logo_round.png differ diff --git a/docs/built_with_sphinx/html/_static/language_data.js b/docs/built_with_sphinx/html/_static/language_data.js new file mode 100644 index 0000000..250f566 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence 
+ var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w 
= stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/docs/built_with_sphinx/html/_static/minus.png b/docs/built_with_sphinx/html/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/docs/built_with_sphinx/html/_static/minus.png differ diff --git a/docs/built_with_sphinx/html/_static/plus.png b/docs/built_with_sphinx/html/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/docs/built_with_sphinx/html/_static/plus.png differ diff --git a/docs/built_with_sphinx/html/_static/pygments.css b/docs/built_with_sphinx/html/_static/pygments.css new file mode 100644 index 0000000..2eb16e4 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/pygments.css @@ -0,0 +1,44 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 
+span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #ffffff; } +.highlight .c { font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { font-weight: bold } /* Keyword */ +.highlight .ch { font-style: italic } /* Comment.Hashbang */ +.highlight .cm { font-style: italic } /* Comment.Multiline */ +.highlight .cpf { font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { font-style: italic } /* Comment.Single */ +.highlight .cs { font-style: italic } /* Comment.Special */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gh { font-weight: bold } /* Generic.Heading */ +.highlight .gp { font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { font-weight: bold } /* Generic.Subheading */ +.highlight .kc { font-weight: bold } /* Keyword.Constant */ +.highlight .kd { font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { font-weight: bold } /* Keyword.Namespace */ +.highlight .kr { font-weight: bold } /* Keyword.Reserved */ +.highlight .s { font-style: italic } /* Literal.String */ +.highlight .nc { font-weight: bold } /* Name.Class */ +.highlight .ni { font-weight: bold } /* Name.Entity */ +.highlight .ne { font-weight: bold } /* Name.Exception */ +.highlight .nn { font-weight: bold } /* Name.Namespace */ +.highlight .nt { font-weight: bold } /* Name.Tag */ +.highlight .ow { font-weight: bold } /* Operator.Word */ +.highlight .sa { font-style: italic } /* Literal.String.Affix */ +.highlight .sb { font-style: italic } /* Literal.String.Backtick */ +.highlight .sc { font-style: italic } /* Literal.String.Char */ +.highlight .dl { font-style: italic } /* Literal.String.Delimiter */ +.highlight .sd { font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { font-style: italic } /* 
Literal.String.Double */ +.highlight .se { font-weight: bold; font-style: italic } /* Literal.String.Escape */ +.highlight .sh { font-style: italic } /* Literal.String.Heredoc */ +.highlight .si { font-weight: bold; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { font-style: italic } /* Literal.String.Other */ +.highlight .sr { font-style: italic } /* Literal.String.Regex */ +.highlight .s1 { font-style: italic } /* Literal.String.Single */ +.highlight .ss { font-style: italic } /* Literal.String.Symbol */ \ No newline at end of file diff --git a/docs/built_with_sphinx/html/_static/searchtools.js b/docs/built_with_sphinx/html/_static/searchtools.js new file mode 100644 index 0000000..97d56a7 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/searchtools.js @@ -0,0 +1,566 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = docUrlRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = docUrlRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + 
Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the 
browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + 
titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + 
if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && !terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/docs/built_with_sphinx/html/_static/sphinx_highlight.js b/docs/built_with_sphinx/html/_static/sphinx_highlight.js new file mode 100644 index 0000000..aae669d --- /dev/null +++ b/docs/built_with_sphinx/html/_static/sphinx_highlight.js @@ -0,0 +1,144 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + parent.insertBefore( + span, + parent.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + 
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(SphinxHighlight.highlightSearchWords); +_ready(SphinxHighlight.initEscapeListener); diff --git a/docs/built_with_sphinx/html/_static/theme.css b/docs/built_with_sphinx/html/_static/theme.css new file mode 100644 index 0000000..a3493f8 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/theme.css @@ -0,0 +1,7 @@ +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:italic;font-weight:400;src:url(2a472f0334546ace60b3.woff2) format("woff2"),url(f509ddf49c74ded8c0ee.woff) format("woff")} +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:normal;font-weight:400;src:url(4163112e566ed7697acf.woff2) format("woff2"),url(6c1a3008005254946aef.woff) format("woff")} +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:italic;font-weight:500;src:url(c10c163dd1c289f11c49.woff2) format("woff2"),url(5b12b1b913a1d0348fc6.woff) format("woff")} +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:normal;font-weight:500;src:url(0ff19efc74e94c856af0.woff2) format("woff2"),url(26400cae88e50682937d.woff) format("woff")} +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:italic;font-weight:700;src:url(aef37e2fab43d03531cd.woff2) format("woff2"),url(b8546ea1646db8ea9c7f.woff) format("woff")} +@font-face{font-display:swap;font-family:JetBrains Mono;font-style:normal;font-weight:700;src:url(f4604891b5f1fc1bdbe5.woff2) format("woff2"),url(3925889378745d0382d0.woff) format("woff")} +/*! 
tailwindcss v3.3.5 | MIT License | https://tailwindcss.com*/*,:after,:before{border:0 solid #e5e7eb;box-sizing:border-box}:after,:before{--tw-content:""}html{-webkit-text-size-adjust:100%;font-feature-settings:normal;font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,Helvetica Neue,Arial,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;font-variation-settings:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4}body{line-height:inherit;margin:0}hr{border-top-width:1px;color:inherit;height:0}abbr:where([title]){text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:JetBrains\ Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{border-collapse:collapse;border-color:inherit;text-indent:0}button,input,optgroup,select,textarea{font-feature-settings:inherit;color:inherit;font-family:inherit;font-size:100%;font-variation-settings:inherit;font-weight:inherit;line-height:inherit;margin:0;padding:0}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0
}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{color:#9ca3af;opacity:1}input::placeholder,textarea::placeholder{color:#9ca3af;opacity:1}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{height:auto;max-width:100%}[hidden]{display:none}:root{--background:0 0% 100%;--foreground:222.2 47.4% 11.2%;--muted:210 40% 96.1%;--muted-foreground:215.4 16.3% 46.9%;--popover:0 0% 100%;--popover-foreground:222.2 47.4% 11.2%;--border:214.3 31.8% 91.4%;--input:214.3 31.8% 91.4%;--card:0 0% 100%;--card-foreground:222.2 47.4% 11.2%;--primary:222.2 47.4% 11.2%;--primary-foreground:210 40% 98%;--secondary:210 40% 96.1%;--secondary-foreground:222.2 47.4% 11.2%;--accent:210 40% 96.1%;--accent-foreground:222.2 47.4% 11.2%;--destructive:0 100% 50%;--destructive-foreground:210 40% 98%;--ring:215 20.2% 65.1%;--radius:0.5rem}.dark{--background:224 71% 4%;--foreground:213 31% 91%;--muted:223 47% 11%;--muted-foreground:215.4 16.3% 56.9%;--accent:216 34% 17%;--accent-foreground:210 40% 98%;--popover:224 71% 4%;--popover-foreground:215 20.2% 65.1%;--border:216 34% 17%;--input:216 34% 17%;--card:224 71% 4%;--card-foreground:213 31% 91%;--primary:210 40% 98%;--primary-foreground:222.2 47.4% 1.2%;--secondary:222.2 47.4% 11.2%;--secondary-foreground:210 40% 98%;--destructive:0 63% 31%;--destructive-foreground:210 40% 98%;--ring:216 34% 17%;--radius:0.5rem}*,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: 
;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 transparent;--tw-ring-shadow:0 0 transparent;--tw-shadow:0 0 transparent;--tw-shadow-colored:0 0 transparent;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: }::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 transparent;--tw-ring-shadow:0 0 transparent;--tw-shadow:0 0 transparent;--tw-shadow-colored:0 0 transparent;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: }.container{margin-left:auto;margin-right:auto;padding-left:2rem;padding-right:2rem;width:100%}@media (min-width:1400px){.container{max-width:1400px}}#content svg{display:inline}#content hr{border-color:#e2e8f0;border-color:hsl(var(--border));margin-bottom:1rem;margin-top:1rem}@media (min-width:768px){#content 
hr{margin-bottom:1.5rem;margin-top:1.5rem}}#content h1{font-size:2.25rem;font-weight:700;line-height:2.5rem;margin-bottom:.5rem}#content h2{border-bottom-width:1px;border-color:#e2e8f0;border-color:hsl(var(--border));font-size:1.875rem;font-weight:600;line-height:2.25rem;margin-top:3rem;padding-bottom:.5rem}#content h3{font-size:1.5rem;font-weight:600;line-height:2rem;margin-top:2rem}#content .rubric,#content h4{font-size:1.25rem;font-weight:600;line-height:1.75rem;margin-top:2rem}#content section{scroll-margin:5rem}#content section>p{line-height:1.75rem;margin-top:1.5rem}#content section>p.rubric,#content section>p:first-child{margin-top:0}#content section>p.lead{color:#64748b;color:hsl(var(--muted-foreground));font-size:1.125rem;line-height:1.75rem}#content .centered{text-align:center}#content a:not(.toc-backref){color:#0f172a;color:hsl(var(--primary));font-weight:500;text-decoration-line:underline;text-decoration-thickness:from-font;text-underline-offset:4px}#content ul:not(.search){list-style-type:disc;margin-left:1.5rem;margin-top:1.5rem}#content ul:not(.search) p,#content ul:not(.search)>li{margin-top:1.5rem}#content ul:not(.search) ul{margin-top:0}#content ol{list-style-type:decimal;margin-left:1.5rem;margin-top:1.5rem}#content ol ::marker{font-weight:500}#content ol::marker{font-weight:500}#content ol p,#content ol>li{margin-top:1.5rem}#content ol ol{margin-top:0}#content dl{margin-top:1.5rem}#content dl dt:not(.sig){font-weight:500;margin-top:1.5rem}#content dl dt:not(.sig):first-child{margin-bottom:0;margin-top:0}#content dl dd{margin-left:1.5rem}#content dl p{margin-bottom:.5rem;margin-top:.5rem}#content .align-center{margin-left:auto;margin-right:auto;text-align:center}#content .align-right{margin-left:auto;text-align:right}#content img{margin-top:1.5rem}#content figure img{display:inline-block}#content figcaption{color:#64748b;color:hsl(var(--muted-foreground));font-size:.875rem;line-height:1.25rem;margin-bottom:3rem}#content 
figcaption>*{margin-top:1rem}blockquote{border-left-width:2px;font-style:italic;margin-bottom:1.5rem;margin-top:1.5rem;padding-left:1.5rem}blockquote .attribution{font-style:normal;margin-top:.5rem}table{font-size:.875rem;line-height:1.25rem;margin-bottom:1.5rem;margin-top:1.5rem;width:100%}table caption{color:#64748b;color:hsl(var(--muted-foreground));margin-bottom:1.5rem;text-align:left}table thead{border-bottom-width:1px;border-color:#e2e8f0;border-color:hsl(var(--border))}table th{font-weight:500;padding-bottom:.5rem;padding-left:.5rem;text-align:left}table th:first-child{padding-left:0}.dark table th{font-weight:600}table tbody tr{border-bottom-width:1px;border-color:#e2e8f0;border-color:hsl(var(--border))}table tbody td{padding:.5rem}table tbody td:first-child{padding-left:0}.footnote>.label{float:left;padding-right:.5rem}.footnote>:not(.label){margin-bottom:1.5rem;margin-left:2rem;margin-top:1.5rem}.footnote .footnote-reference,.footnote [role=doc-backlink]{text-decoration-line:none!important}.admonition{background-color:#fff;background-color:hsl(var(--background));border-color:#e2e8f0;border-color:hsl(var(--border));border-radius:.5rem;border-radius:var(--radius);border-width:1px;color:#0f172a;color:hsl(var(--foreground));font-size:.875rem;line-height:1.25rem;margin-bottom:1.5rem;margin-top:1.5rem;padding:1rem}.admonition p:not(.admonition-title){margin-top:.5rem}.admonition .admonition-title{margin-top:0!important}.admonition-title{font-weight:500}.dark .admonition-title{font-weight:600;letter-spacing:.025em}.note{--tw-border-opacity:1;--tw-bg-opacity:1;--tw-text-opacity:1;background-color:#f0f9ff;background-color:rgba(240,249,255,var(--tw-bg-opacity));border-color:#0284c7;border-color:rgba(2,132,199,var(--tw-border-opacity));color:#0c4a6e;color:rgba(12,74,110,var(--tw-text-opacity))}.dark 
.note{--tw-text-opacity:1;background-color:rgba(96,165,250,.15);color:#e0f2fe;color:rgba(224,242,254,var(--tw-text-opacity))}.hint,.tip{--tw-border-opacity:1;--tw-bg-opacity:1;--tw-text-opacity:1;background-color:#f0fdf4;background-color:rgba(240,253,244,var(--tw-bg-opacity));border-color:#16a34a;border-color:rgba(22,163,74,var(--tw-border-opacity));color:#14532d;color:rgba(20,83,45,var(--tw-text-opacity))}.dark .hint,.dark .tip{--tw-text-opacity:1;background-color:rgba(74,222,128,.15);color:#dcfce7;color:rgba(220,252,231,var(--tw-text-opacity))}.danger,.error{--tw-border-opacity:1;--tw-bg-opacity:1;--tw-text-opacity:1;background-color:#fef2f2;background-color:rgba(254,242,242,var(--tw-bg-opacity));border-color:#dc2626;border-color:rgba(220,38,38,var(--tw-border-opacity));color:#7f1d1d;color:rgba(127,29,29,var(--tw-text-opacity))}.dark .danger,.dark .error{--tw-text-opacity:1;background-color:hsla(0,91%,71%,.15);color:#fee2e2;color:rgba(254,226,226,var(--tw-text-opacity))}.attention,.caution,.important,.warning{--tw-border-opacity:1;--tw-bg-opacity:1;--tw-text-opacity:1;background-color:#fefce8;background-color:rgba(254,252,232,var(--tw-bg-opacity));border-color:#ca8a04;border-color:rgba(202,138,4,var(--tw-border-opacity));color:#713f12;color:rgba(113,63,18,var(--tw-text-opacity))}.dark .attention,.dark .caution,.dark .important,.dark .warning{--tw-text-opacity:1;background-color:rgba(250,204,21,.15);color:#fef9c3;color:rgba(254,249,195,var(--tw-text-opacity))}div.versionadded{--tw-border-opacity:1;border-color:#16a34a;border-color:rgba(22,163,74,var(--tw-border-opacity));border-left-width:3px;font-size:.875rem;line-height:1.25rem;margin-top:1rem;padding:.25rem 1rem}div.versionadded p{margin-top:0!important}div.versionadded p:last-child{margin-bottom:0!important}div.versionadded .versionmodified{--tw-text-opacity:1;color:#14532d;color:rgba(20,83,45,var(--tw-text-opacity));font-weight:500}.dark div.versionadded 
.versionmodified{--tw-text-opacity:1;color:#22c55e;color:rgba(34,197,94,var(--tw-text-opacity));letter-spacing:.025em}div.versionchanged{--tw-border-opacity:1;border-color:#ca8a04;border-color:rgba(202,138,4,var(--tw-border-opacity));border-left-width:3px;font-size:.875rem;line-height:1.25rem;margin-top:1rem;padding:.25rem 1rem}div.versionchanged p{margin-top:0!important}div.versionchanged p:last-child{margin-bottom:0!important}div.versionchanged .versionmodified{--tw-text-opacity:1;color:#713f12;color:rgba(113,63,18,var(--tw-text-opacity));font-weight:500}.dark div.versionchanged .versionmodified{--tw-text-opacity:1;color:#eab308;color:rgba(234,179,8,var(--tw-text-opacity));letter-spacing:.025em}div.deprecated{--tw-border-opacity:1;border-color:#dc2626;border-color:rgba(220,38,38,var(--tw-border-opacity));border-left-width:3px;font-size:.875rem;line-height:1.25rem;margin-top:1rem;padding:.25rem 1rem}div.deprecated p{margin-top:0!important}div.deprecated p:last-child{margin-bottom:0!important}div.deprecated .versionmodified{--tw-text-opacity:1;color:#7f1d1d;color:rgba(127,29,29,var(--tw-text-opacity));font-weight:500}.dark div.deprecated .versionmodified{--tw-text-opacity:1;color:#f87171;color:rgba(248,113,113,var(--tw-text-opacity));letter-spacing:.025em}.highlight{background-color:transparent;position:relative}.highlight:hover .copy{opacity:1}.highlight .gp,.highlight-pycon .go,.highlight-python .go{-webkit-user-select:none;-moz-user-select:none;user-select:none}.literal-block-wrapper{border-color:#e2e8f0;border-color:hsl(var(--border));border-radius:.5rem;border-radius:var(--radius);border-width:1px;margin-left:0;margin-right:0;margin-top:1.5rem;max-width:none;padding-left:0;padding-right:0}.literal-block-wrapper pre{border-radius:0;border-style:none;margin-top:0}.literal-block-wrapper 
.code-block-caption{border-bottom-width:1px;border-color:#e2e8f0;border-color:hsl(var(--border));border-top-left-radius:.5rem;border-top-left-radius:var(--radius);border-top-right-radius:.5rem;border-top-right-radius:var(--radius);color:#64748b;color:hsl(var(--muted-foreground));font-size:.875rem;letter-spacing:.025em;line-height:1.25rem;padding:.5rem 1rem}code{background-color:#f1f5f9;background-color:hsl(var(--muted));border-radius:.25rem;font-family:JetBrains\ Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:.875rem;line-height:1.25rem;padding:.2em .3em;position:relative;white-space:nowrap}code .ge,code em{color:#0f172a;color:hsl(var(--accent-foreground));font-weight:700;letter-spacing:.025em}pre{border-color:#e2e8f0;border-color:hsl(var(--border));border-radius:.5rem;border-radius:var(--radius);border-width:1px;font-size:.875rem;line-height:1.25rem;margin-top:1.5rem;overflow-x:auto;padding-bottom:1rem;padding-top:1rem}pre[data-theme=dark]{background-color:#fff;background-color:hsl(var(--background))}pre[data-theme=light]{--tw-bg-opacity:1;background-color:#fff;background-color:rgba(255,255,255,var(--tw-bg-opacity))}pre.literal-block{padding-left:1rem;padding-right:1rem}pre code{background-color:transparent;padding:0;white-space:pre}pre code>[id^=line-]{display:block;padding-left:1rem;padding-right:1rem}pre code [id^=line-]:has(.gd),pre code [id^=line-]:has(.gi),pre code [id^=line-]:has(del),pre code [id^=line-]:has(ins),pre code [id^=line-]:has(mark){padding-left:0;padding-right:0}pre code [id^=line-] del,pre code [id^=line-] ins,pre code [id^=line-] mark{display:block;padding-left:1rem;padding-right:1rem;position:relative}pre code [id^=line-] mark{--tw-shadow:2px 0 currentColor inset;--tw-shadow-colored:inset 2px 0 var(--tw-shadow-color);background-color:#f1f5f9;background-color:hsl(var(--muted));box-shadow:0 0 transparent,0 0 transparent,inset 2px 0 currentColor;box-shadow:var(--tw-ring-offset-shadow,0 0 
transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow);color:inherit}.dark pre code [id^=line-] mark{--tw-bg-opacity:1;--tw-shadow:3px 0 currentColor inset;--tw-shadow-colored:inset 3px 0 var(--tw-shadow-color);background-color:#334155;background-color:rgba(51,65,85,var(--tw-bg-opacity));box-shadow:0 0 transparent,0 0 transparent,inset 3px 0 currentColor;box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)}pre code [id^=line-] ins{--tw-text-opacity:1;background-color:rgba(34,197,94,.3);color:#14532d;color:rgba(20,83,45,var(--tw-text-opacity));text-decoration-line:none}pre code [id^=line-] ins:before{--tw-content:"\002b";content:"\002b";content:var(--tw-content);left:2px;position:absolute}.dark pre code [id^=line-] ins{--tw-bg-opacity:1;--tw-text-opacity:1;color:#bbf7d0;color:rgba(187,247,208,var(--tw-text-opacity))}pre code [id^=line-] del{--tw-text-opacity:1;background-color:rgba(239,68,68,.3);color:#7f1d1d;color:rgba(127,29,29,var(--tw-text-opacity));text-decoration-line:none}pre code [id^=line-] del:before{--tw-content:"\2212";content:"\2212";content:var(--tw-content);left:2px;position:absolute}.dark pre code [id^=line-] del{--tw-bg-opacity:1;--tw-text-opacity:1;color:#fecaca;color:rgba(254,202,202,var(--tw-text-opacity))}pre .linenos{padding-left:0;padding-right:1rem;-webkit-user-select:none;-moz-user-select:none;user-select:none}.highlight-diff .gi{--tw-text-opacity:1;background-color:rgba(34,197,94,.3);color:#14532d;color:rgba(20,83,45,var(--tw-text-opacity));display:inline-block;padding-left:1rem;padding-right:1rem;width:100%}.dark .highlight-diff .gi{--tw-bg-opacity:1;--tw-text-opacity:1;color:#bbf7d0;color:rgba(187,247,208,var(--tw-text-opacity))}.highlight-diff .gd{--tw-text-opacity:1;background-color:rgba(239,68,68,.3);color:#7f1d1d;color:rgba(127,29,29,var(--tw-text-opacity));display:inline-block;padding-left:1rem;padding-right:1rem;width:100%}.dark .highlight-diff 
.gd{--tw-bg-opacity:1;--tw-text-opacity:1;color:#bbf7d0;color:rgba(187,247,208,var(--tw-text-opacity))}.guilabel,.menuselection{border-color:#e2e8f0;border-color:hsl(var(--border));border-radius:calc(.5rem - 4px);border-radius:calc(var(--radius) - 4px);border-width:1px;color:#0f172a;color:hsl(var(--accent-foreground));font-weight:500;padding:1px .5rem}#content kbd:not(.compound){background-color:#f1f5f9;background-color:hsl(var(--muted));border-radius:.25rem;border-width:1px;font-size:.875rem;font-weight:500;letter-spacing:.025em;line-height:1.25rem;padding:1px .25rem}.sig{border-color:#e2e8f0;border-color:hsl(var(--border));border-top-width:1px;font-family:JetBrains\ Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-weight:700;padding-top:1.5rem;scroll-margin:5rem}.sig-name{color:#0f172a;color:hsl(var(--accent-foreground))}em.property{color:#64748b;color:hsl(var(--muted-foreground))}.option .sig-prename{font-style:italic}.viewcode-link{color:#64748b;color:hsl(var(--muted-foreground));float:right}.option-list kbd{background-color:transparent!important;border-style:none!important;font-size:1em!important;font-weight:700!important}.headerlink{align-items:center;display:inline-flex;margin-left:.25rem;position:relative;vertical-align:middle}.headerlink:after{-webkit-font-smoothing:subpixel-antialiased;word-wrap:break-word;--tw-bg-opacity:0.75;background-color:#f1f5f9;background-color:hsl(var(--muted));border-radius:calc(.5rem - 4px);border-radius:calc(var(--radius) - 4px);color:#64748b;color:hsl(var(--muted-foreground));content:attr(data-tooltip);display:none;font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,Helvetica Neue,Arial,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color 
Emoji;font-size:.75rem;font-weight:400;letter-spacing:normal;line-height:1rem;opacity:0;padding:.25rem;pointer-events:none;position:absolute;text-align:center;text-decoration-line:none;text-shadow:none;text-transform:none;white-space:pre;z-index:1000000}.headerlink:focus:after,.headerlink:focus:before,.headerlink:hover:after,.headerlink:hover:before{animation-delay:.2s;animation-duration:.4s;animation-fill-mode:forwards;animation-name:tooltip-appear;animation-timing-function:ease-in;display:inline-block;-webkit-text-decoration:none;text-decoration:none}.headerlink:after{margin-top:6px;right:50%;top:100%}.headerlink:before{border-bottom-color:#1a202c;bottom:-7px;margin-right:-6px;right:50%;top:auto}.headerlink:after{margin-right:-16px}.headerlink>*{fill:currentColor;color:#64748b;color:hsl(var(--muted-foreground));visibility:hidden}.admonition-title:hover .headerlink,.admonition-title:hover .headerlink>*,.code-block-caption:hover .headerlink,.code-block-caption:hover .headerlink>*,.headerlink:focus>*,dt:not(.does-not-exist):hover .headerlink,dt:not(.does-not-exist):hover .headerlink>*,figure:not(.does-not-exist):hover .headerlink,figure:not(.does-not-exist):hover .headerlink>*,h1:not(.does-not-exist):hover .headerlink,h1:not(.does-not-exist):hover .headerlink>*,h2:not(.does-not-exist):hover .headerlink,h2:not(.does-not-exist):hover .headerlink>*,h3:not(.does-not-exist):hover .headerlink,h3:not(.does-not-exist):hover .headerlink>*,h4:not(.does-not-exist):hover .headerlink,h4:not(.does-not-exist):hover .headerlink>*,table:not(.does-not-exist):hover .headerlink,table:not(.does-not-exist):hover .headerlink>*{visibility:visible}#left-sidebar .caption{border-radius:calc(.5rem - 2px);border-radius:calc(var(--radius) - 2px);font-size:.875rem;font-weight:600;line-height:1.25rem;margin-bottom:.25rem;padding:1.5rem .5rem .25rem}#left-sidebar .caption:first-child{padding-top:0}#left-sidebar 
ul{display:grid;font-size:.875rem;grid-auto-flow:row;grid-auto-rows:max-content;line-height:1.25rem;overflow:hidden;transform:translate3d(var(--tw-translate-x),var(--tw-translate-y),0) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transition-duration:.3s;transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1)}@media (prefers-reduced-motion:reduce){#left-sidebar ul{transition-property:none}}#left-sidebar ul ul{margin-left:.75rem;opacity:1;padding:.5rem 0 .5rem .75rem;position:relative;transition-duration:.5s;transition-property:opacity;transition-timing-function:cubic-bezier(.4,0,.2,1)}#left-sidebar ul ul:before{--tw-bg-opacity:1;--tw-content:"";background-color:#e5e7eb;background-color:rgba(229,231,235,var(--tw-bg-opacity));bottom:.25rem;content:"";content:var(--tw-content);left:0;position:absolute;top:.25rem;width:1px}.dark #left-sidebar ul ul:before{--tw-bg-opacity:1;background-color:#262626;background-color:rgba(38,38,38,var(--tw-bg-opacity));content:var(--tw-content)}#left-sidebar a{align-items:center;border-color:transparent;border-radius:calc(.5rem - 2px);border-radius:calc(var(--radius) - 2px);border-width:1px;display:flex;padding:.375rem .5rem;width:100%}#left-sidebar a:hover{text-decoration-line:underline}#left-sidebar a:focus-visible{outline-offset:-1px}#left-sidebar a>button{border-radius:.25rem;color:#64748b;color:hsl(var(--muted-foreground))}#left-sidebar a>button:hover{background-color:rgba(15,23,42,.1);background-color:hsl(var(--primary)/.1)}#left-sidebar a>button>svg{transform:translate3d(var(--tw-translate-x),var(--tw-translate-y),0) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform-origin:center;transition-duration:.15s;transition-property:transform;transition-timing-function:cubic-bezier(.4,0,.2,1)}#left-sidebar 
a.current{background-color:#f1f5f9;background-color:hsl(var(--accent));border-color:#e2e8f0;border-color:hsl(var(--border));border-width:1px;color:#0f172a;color:hsl(var(--accent-foreground));font-weight:500}#left-sidebar a.expandable{justify-content:space-between}#left-sidebar a.expandable.expanded>button>svg{--tw-rotate:90deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(90deg) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}#right-sidebar ul{margin:0}#right-sidebar ul li{margin-top:0;padding-top:.5rem}#right-sidebar ul li a{color:#64748b;color:hsl(var(--muted-foreground));display:inline-block;text-decoration-line:none;transition-duration:.15s;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1)}#right-sidebar ul li a:hover{color:#0f172a;color:hsl(var(--foreground))}#right-sidebar ul li a:focus-visible{outline-offset:-1px}#right-sidebar ul li a[data-current=true]{color:#0f172a;color:hsl(var(--foreground));font-weight:500}#right-sidebar ul li ul{padding-left:1rem}#right-sidebar ul:not(:last-child){padding-bottom:.5rem}.contents>:not([hidden])~:not([hidden]),.toctree-wrapper>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-bottom:0;margin-bottom:calc(.5rem*var(--tw-space-y-reverse));margin-top:.5rem;margin-top:calc(.5rem*(1 - var(--tw-space-y-reverse)))}.contents,.toctree-wrapper{font-size:.875rem;line-height:1.25rem}.contents .caption,.contents .topic-title,.toctree-wrapper .caption,.toctree-wrapper .topic-title{font-weight:500;padding-top:1.5rem}.contents ul,.toctree-wrapper ul{list-style-type:none!important;margin:0!important}.contents ul li a.reference,.toctree-wrapper ul li 
a.reference{color:#64748b!important;color:hsl(var(--muted-foreground))!important;display:inline-block;font-weight:400!important;text-decoration-line:none!important;transition-duration:.15s;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1)}.contents ul li a.reference:hover,.toctree-wrapper ul li a.reference:hover{color:#0f172a;color:hsl(var(--foreground))}.contents ul li ul,.toctree-wrapper ul li ul{padding-left:1rem}.contents ul:not(:last-child),.toctree-wrapper ul:not(:last-child){padding-bottom:.5rem}#search-results .search-summary{color:#64748b;color:hsl(var(--muted-foreground));font-size:1.25rem;line-height:1.75rem;margin-top:1.5rem}#search-results ul.search,#search-results ul.search li{margin-top:1.5rem}#search-results ul.search .context{color:#64748b;color:hsl(var(--muted-foreground));font-size:.875rem;line-height:1.25rem;margin-top:.5rem}.highlighted{background-color:#f1f5f9;background-color:hsl(var(--accent));text-decoration-line:underline;text-decoration-thickness:2px}.highlight-link{border-color:#e2e8f0;border-color:hsl(var(--border));border-radius:.5rem;border-radius:var(--radius);border-width:1px;font-size:.875rem;line-height:1.25rem;padding:.5rem 1rem;position:fixed;right:.5rem;top:4rem}.highlight-link:hover{background-color:#f1f5f9;background-color:hsl(var(--accent))}@media (min-width:1024px){.highlight-link{right:4rem}}.tooltipped{position:relative}.tooltipped:after{-webkit-font-smoothing:subpixel-antialiased;word-wrap:break-word;--tw-bg-opacity:0.75;background-color:#f1f5f9;background-color:hsl(var(--muted));border-radius:calc(.5rem - 4px);border-radius:calc(var(--radius) - 4px);color:#64748b;color:hsl(var(--muted-foreground));content:attr(data-tooltip);display:none;font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,Helvetica Neue,Arial,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto 
Color Emoji;font-size:.75rem;font-weight:400;letter-spacing:normal;line-height:1rem;opacity:0;padding:.25rem;pointer-events:none;position:absolute;text-align:center;text-decoration-line:none;text-shadow:none;text-transform:none;white-space:pre;z-index:1000000}@keyframes tooltip-appear{0%{opacity:0}to{opacity:1}}.tooltipped:focus:after,.tooltipped:focus:before,.tooltipped:hover:after,.tooltipped:hover:before{animation-delay:.2s;animation-duration:.4s;animation-fill-mode:forwards;animation-name:tooltip-appear;animation-timing-function:ease-in;display:inline-block;-webkit-text-decoration:none;text-decoration:none}.tooltipped-no-delay:focus:after,.tooltipped-no-delay:focus:before,.tooltipped-no-delay:hover:after,.tooltipped-no-delay:hover:before{animation-delay:0s}.tooltipped-multiline:focus:after,.tooltipped-multiline:hover:after{display:table-cell}.tooltipped-s:after,.tooltipped-se:after,.tooltipped-sw:after{margin-top:6px;right:50%;top:100%}.tooltipped-s:before,.tooltipped-se:before,.tooltipped-sw:before{border-bottom-color:#1a202c;bottom:-7px;margin-right:-6px;right:50%;top:auto}.tooltipped-se:after{left:50%;margin-left:-16px;right:auto}.tooltipped-sw:after{margin-right:-16px}.tooltipped-n:after,.tooltipped-ne:after,.tooltipped-nw:after{bottom:100%;margin-bottom:6px;right:50%}.tooltipped-n:before,.tooltipped-ne:before,.tooltipped-nw:before{border-top-color:#1a202c;bottom:auto;margin-right:-6px;right:50%;top:-7px}.tooltipped-ne:after{left:50%;margin-left:-16px;right:auto}.tooltipped-nw:after{margin-right:-16px}.tooltipped-n:after,.tooltipped-s:after{transform:translateX(50%)}.tooltipped-w:after{bottom:50%;margin-right:6px;right:100%;transform:translateY(50%)}.tooltipped-w:before{border-left-color:#1a202c;bottom:50%;left:-7px;margin-top:-6px;top:50%}.tooltipped-e:after{bottom:50%;left:100%;margin-left:6px;transform:translateY(50%)}.tooltipped-e:before{border-right-color:#1a202c;bottom:50%;margin-top:-6px;right:-7px;top:50%}.sr-only{clip:rect(0,0,0,0);border-width:0;he
ight:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;white-space:nowrap;width:1px}.pointer-events-none{pointer-events:none}.invisible{visibility:hidden}.collapse{visibility:collapse}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.sticky{position:sticky}.inset-0{left:0;right:0}.inset-0,.inset-y-0{bottom:0;top:0}.bottom-8{bottom:2rem}.left-0{left:0}.right-1{right:.25rem}.right-1\.5{right:.375rem}.right-4{right:1rem}.right-8{right:2rem}.top-0{top:0}.top-16{top:4rem}.top-2{top:.5rem}.top-4{top:1rem}.z-10{z-index:10}.z-20{z-index:20}.z-40{z-index:40}.z-50{z-index:50}.z-\[100\]{z-index:100}.mx-auto{margin-left:auto;margin-right:auto}.my-4{margin-bottom:1rem;margin-top:1rem}.my-6{margin-bottom:1.5rem;margin-top:1.5rem}.my-8{margin-bottom:2rem;margin-top:2rem}.-mt-10{margin-top:-2.5rem}.mb-4{margin-bottom:1rem}.mb-\[2px\]{margin-bottom:2px}.ml-0{margin-left:0}.ml-2{margin-left:.5rem}.ml-auto{margin-left:auto}.mr-1{margin-right:.25rem}.mr-2{margin-right:.5rem}.mr-4{margin-right:1rem}.mr-6{margin-right:1.5rem}.mr-auto{margin-right:auto}.mt-12{margin-top:3rem}.mt-4{margin-top:1rem}.block{display:block}.inline{display:inline}.flex{display:flex}.inline-flex{display:inline-flex}.table{display:table}.hidden{display:none}.h-10{height:2.5rem}.h-14{height:3.5rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-9{height:2.25rem}.h-\[14px\]{height:14px}.h-\[calc\(100vh-8rem\)\]{height:calc(100vh - 8rem)}.h-full{height:100%}.max-h-\[calc\(var\(--vh\)-4rem\)\]{max-height:calc(var(--vh) - 4rem)}.min-h-screen{min-height:100vh}.w-4{width:1rem}.w-5\/6{width:83.333333%}.w-6{width:1.5rem}.w-9{width:2.25rem}.w-\[14px\]{width:14px}.w-full{width:100%}.min-w-0{min-width:0}.min-w-full{min-width:100%}.max-w-prose{max-width:65ch}.flex-1{flex:1 1 0%}.shrink-0{flex-shrink:0}.-translate-x-full{--tw-translate-x:-100%;transform:translate(-100%,var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) 
scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.translate-x-0{--tw-translate-x:0px;transform:translateY(var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.rotate-0{--tw-rotate:0deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(0deg) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.rotate-90{--tw-rotate:90deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(90deg) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.scale-0{--tw-scale-x:0;--tw-scale-y:0;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(0) scaleY(0);transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.scale-100{--tw-scale-x:1;--tw-scale-y:1;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) 
scaleX(1) scaleY(1)}.scale-100,.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.select-none{-webkit-user-select:none;-moz-user-select:none;user-select:none}.flex-col{flex-direction:column}.items-start{align-items:flex-start}.items-center{align-items:center}.\!justify-start{justify-content:flex-start!important}.justify-start{justify-content:flex-start}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-1{gap:.25rem}.gap-4{gap:1rem}.space-x-1>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-left:.25rem;margin-left:calc(.25rem*(1 - var(--tw-space-x-reverse)));margin-right:0;margin-right:calc(.25rem*var(--tw-space-x-reverse))}.space-x-2>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-left:.5rem;margin-left:calc(.5rem*(1 - var(--tw-space-x-reverse)));margin-right:0;margin-right:calc(.5rem*var(--tw-space-x-reverse))}.space-x-6>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-left:1.5rem;margin-left:calc(1.5rem*(1 - var(--tw-space-x-reverse)));margin-right:0;margin-right:calc(1.5rem*var(--tw-space-x-reverse))}.space-y-2>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-bottom:0;margin-bottom:calc(.5rem*var(--tw-space-y-reverse));margin-top:.5rem;margin-top:calc(.5rem*(1 - var(--tw-space-y-reverse)))}.overflow-hidden{overflow:hidden}.overflow-y-auto{overflow-y:auto}.scroll-smooth{scroll-behavior:smooth}.text-ellipsis{text-overflow:ellipsis}.text-clip{text-overflow:clip}.whitespace-nowrap{white-space:nowrap}.rounded{border-radius:.25rem}.rounded-\[0\.5rem\]{border-radius:.5rem}.rounded-md{border-radius:calc(.5rem - 2px);border-radius:calc(var(--radius) - 2px)}.rounded-sm{border-radius:calc(.5rem - 4px);border-radius:calc(var(--radius) - 
4px)}.border{border-width:1px}.border-b{border-bottom-width:1px}.border-r{border-right-width:1px}.border-t{border-top-width:1px}.border-border{border-color:#e2e8f0;border-color:hsl(var(--border))}.border-input{border-color:#e2e8f0;border-color:hsl(var(--input))}.bg-background{background-color:#fff;background-color:hsl(var(--background))}.bg-background\/80{background-color:hsla(0,0%,100%,.8);background-color:hsl(var(--background)/.8)}.bg-background\/95{background-color:hsla(0,0%,100%,.95);background-color:hsl(var(--background)/.95)}.bg-gray-700{--tw-bg-opacity:1;background-color:#374151;background-color:rgba(55,65,81,var(--tw-bg-opacity))}.bg-muted{background-color:#f1f5f9;background-color:hsl(var(--muted))}.bg-transparent{background-color:transparent}.fill-current{fill:currentColor}.p-2{padding:.5rem}.p-4{padding:1rem}.p-6{padding:1.5rem}.px-0{padding-left:0;padding-right:0}.px-1{padding-left:.25rem;padding-right:.25rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-4{padding-left:1rem;padding-right:1rem}.px-8{padding-left:2rem;padding-right:2rem}.py-2{padding-bottom:.5rem;padding-top:.5rem}.py-6{padding-bottom:1.5rem;padding-top:1.5rem}.pr-6{padding-right:1.5rem}.pt-2{padding-top:.5rem}.pt-6{padding-top:1.5rem}.text-center{text-align:center}.font-mono{font-family:JetBrains\ Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.font-sans{font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,Helvetica Neue,Arial,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color 
Emoji}.text-\[10px\]{font-size:10px}.text-base{font-size:1rem;line-height:1.5rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-xs{font-size:.75rem;line-height:1rem}.font-bold{font-weight:700}.font-medium{font-weight:500}.leading-loose{line-height:2}.text-foreground{color:#0f172a;color:hsl(var(--foreground))}.text-foreground\/60{color:rgba(15,23,42,.6);color:hsl(var(--foreground)/.6)}.text-muted-foreground{color:#64748b;color:hsl(var(--muted-foreground))}.text-red-700{--tw-text-opacity:1;color:#b91c1c;color:rgba(185,28,28,var(--tw-text-opacity))}.text-white{--tw-text-opacity:1;color:#fff;color:rgba(255,255,255,var(--tw-text-opacity))}.underline{text-decoration-line:underline}.no-underline{text-decoration-line:none}.underline-offset-4{text-underline-offset:4px}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.opacity-0{opacity:0}.opacity-100{opacity:1}.opacity-70{opacity:.7}.shadow-sm{--tw-shadow:0 1px 2px 0 rgba(0,0,0,.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:0 0 transparent,0 0 transparent,0 1px 2px 0 rgba(0,0,0,.05);box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)}.ring-offset-background{--tw-ring-offset-color:hsl(var(--background))}.backdrop-blur{--tw-backdrop-blur:blur(8px);-webkit-backdrop-filter:blur(8px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:blur(8px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) 
var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia)}.backdrop-blur-sm{--tw-backdrop-blur:blur(4px);-webkit-backdrop-filter:blur(4px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:blur(4px) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) 
var(--tw-backdrop-sepia)}.transition{transition-duration:.15s;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,-webkit-backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter,-webkit-backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-all{transition-duration:.15s;transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-colors{transition-duration:.15s;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-opacity{transition-duration:.15s;transition-property:opacity;transition-timing-function:cubic-bezier(.4,0,.2,1)}.duration-100{transition-duration:.1s}.duration-1000{transition-duration:1s}[x-cloak]{display:none!important}@media 
(max-width:640px){.container{padding-left:1rem;padding-right:1rem}}.hover\:bg-accent:hover{background-color:#f1f5f9;background-color:hsl(var(--accent))}.hover\:bg-gray-950:hover{--tw-bg-opacity:1;background-color:#030712;background-color:rgba(3,7,18,var(--tw-bg-opacity))}.hover\:bg-muted:hover{background-color:#f1f5f9;background-color:hsl(var(--muted))}.hover\:bg-transparent:hover{background-color:transparent}.hover\:text-accent-foreground:hover{color:#0f172a;color:hsl(var(--accent-foreground))}.hover\:text-foreground:hover{color:#0f172a;color:hsl(var(--foreground))}.hover\:text-foreground\/80:hover{color:rgba(15,23,42,.8);color:hsl(var(--foreground)/.8)}.hover\:placeholder-accent-foreground:hover::-moz-placeholder{color:#0f172a;color:hsl(var(--accent-foreground))}.hover\:placeholder-accent-foreground:hover::placeholder{color:#0f172a;color:hsl(var(--accent-foreground))}.hover\:opacity-100:hover{opacity:1}.focus\:translate-x-0:focus{--tw-translate-x:0px;transform:translateY(var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.focus\:bg-accent:focus{background-color:#f1f5f9;background-color:hsl(var(--accent))}.focus\:bg-gray-950:focus{--tw-bg-opacity:1;background-color:#030712;background-color:rgba(3,7,18,var(--tw-bg-opacity))}.focus\:text-accent-foreground:focus{color:#0f172a;color:hsl(var(--accent-foreground))}.focus\:opacity-100:focus{opacity:1}.focus-visible\:outline-none:focus-visible{outline:2px solid transparent;outline-offset:2px}.focus-visible\:outline-offset-\[-1px\]:focus-visible{outline-offset:-1px}.focus-visible\:ring-2:focus-visible{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) 
var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color),var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color),0 0 transparent;box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 transparent)}.focus-visible\:ring-ring:focus-visible{--tw-ring-color:hsl(var(--ring))}.focus-visible\:ring-offset-2:focus-visible{--tw-ring-offset-width:2px}.disabled\:pointer-events-none:disabled{pointer-events:none}.disabled\:opacity-50:disabled{opacity:.5}.group:hover .group-hover\:bg-accent{background-color:#f1f5f9;background-color:hsl(var(--accent))}.group:hover .group-hover\:text-accent-foreground{color:#0f172a;color:hsl(var(--accent-foreground))}.dark .dark\:block{display:block}.dark .dark\:hidden{display:none}.dark .dark\:-rotate-90{--tw-rotate:-90deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(-90deg) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.dark .dark\:rotate-0{--tw-rotate:0deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(0deg) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.dark .dark\:scale-0{--tw-scale-x:0;--tw-scale-y:0;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(0) 
scaleY(0);transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.dark .dark\:scale-100{--tw-scale-x:1;--tw-scale-y:1;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(1) scaleY(1);transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.dark .dark\:invert{--tw-invert:invert(100%);filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) invert(100%) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow);filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}@media (min-width:640px){.sm\:inline-block{display:inline-block}.sm\:flex{display:flex}.sm\:space-x-4>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-left:1rem;margin-left:calc(1rem*(1 - var(--tw-space-x-reverse)));margin-right:0;margin-right:calc(1rem*var(--tw-space-x-reverse))}.sm\:pr-12{padding-right:3rem}}@media (min-width:768px){.md\:sticky{position:sticky}.md\:top-14{top:3.5rem}.md\:z-30{z-index:30}.md\:my-0{margin-bottom:0;margin-top:0}.md\:-ml-2{margin-left:-.5rem}.md\:inline{display:inline}.md\:flex{display:flex}.md\:grid{display:grid}.md\:\!hidden{display:none!important}.md\:hidden{display:none}.md\:h-24{height:6rem}.md\:h-\[calc\(100vh-3\.5rem\)\]{height:calc(100vh - 3.5rem)}.md\:h-auto{height:auto}.md\:w-40{width:10rem}.md\:w-auto{width:auto}.md\:w-full{width:100%}.md\:flex-none{flex:none}.md\:translate-x-0{--tw-translate-x:0px;transform:translateY(var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) 
scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.md\:grid-cols-\[220px_minmax\(0\2c 1fr\)\]{grid-template-columns:220px minmax(0,1fr)}.md\:flex-row{flex-direction:row}.md\:justify-end{justify-content:flex-end}.md\:gap-2{gap:.5rem}.md\:gap-6{gap:1.5rem}.md\:overflow-auto{overflow:auto}.md\:bg-transparent{background-color:transparent}.md\:p-0{padding:0}.md\:px-0{padding-left:0;padding-right:0}.md\:py-0{padding-bottom:0;padding-top:0}.md\:text-left{text-align:left}}@media (min-width:1024px){.lg\:my-8{margin-bottom:2rem;margin-top:2rem}.lg\:w-64{width:16rem}.lg\:grid-cols-\[240px_minmax\(0\2c 1fr\)\]{grid-template-columns:240px minmax(0,1fr)}.lg\:gap-10{gap:2.5rem}.lg\:py-8{padding-bottom:2rem;padding-top:2rem}}@media (min-width:1280px){.xl\:block{display:block}.xl\:grid{display:grid}.xl\:grid-cols-\[1fr_300px\]{grid-template-columns:1fr 300px}} diff --git a/docs/built_with_sphinx/html/_static/theme.js b/docs/built_with_sphinx/html/_static/theme.js new file mode 100644 index 0000000..d7b95a7 --- /dev/null +++ b/docs/built_with_sphinx/html/_static/theme.js @@ -0,0 +1,2 @@ +/*! 
For license information please see theme.js.LICENSE.txt */ +!function(){var e={13:function(e){var t;t=function(){return function(){var e={686:function(e,t,n){"use strict";n.d(t,{default:function(){return x}});var r=n(279),i=n.n(r),o=n(370),a=n.n(o),s=n(817),l=n.n(s);function c(e){try{return document.execCommand(e)}catch(e){return!1}}var u=function(e){var t=l()(e);return c("cut"),t},f=function(e,t){var n=function(e){var t="rtl"===document.documentElement.getAttribute("dir"),n=document.createElement("textarea");n.style.fontSize="12pt",n.style.border="0",n.style.padding="0",n.style.margin="0",n.style.position="absolute",n.style[t?"right":"left"]="-9999px";var r=window.pageYOffset||document.documentElement.scrollTop;return n.style.top="".concat(r,"px"),n.setAttribute("readonly",""),n.value=e,n}(e);t.container.appendChild(n);var r=l()(n);return c("copy"),n.remove(),r},d=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{container:document.body},n="";return"string"==typeof e?n=f(e,t):e instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(null==e?void 0:e.type)?n=f(e.value,t):(n=l()(e),c("copy")),n};function p(e){return p="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},p(e)}function _(e){return _="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},_(e)}function h(e,t){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:{};this.action="function"==typeof e.action?e.action:this.defaultAction,this.target="function"==typeof e.target?e.target:this.defaultTarget,this.text="function"==typeof e.text?e.text:this.defaultText,this.container="object"===_(e.container)?e.container:document.body}},{key:"listenClick",value:function(e){var 
t=this;this.listener=a()(e,"click",(function(e){return t.onClick(e)}))}},{key:"onClick",value:function(e){var t=e.delegateTarget||e.currentTarget,n=this.action(t)||"copy",r=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.action,n=void 0===t?"copy":t,r=e.container,i=e.target,o=e.text;if("copy"!==n&&"cut"!==n)throw new Error('Invalid "action" value, use either "copy" or "cut"');if(void 0!==i){if(!i||"object"!==p(i)||1!==i.nodeType)throw new Error('Invalid "target" value, use a valid Element');if("copy"===n&&i.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if("cut"===n&&(i.hasAttribute("readonly")||i.hasAttribute("disabled")))throw new Error('Invalid "target" attribute. You can\'t cut text from elements with "readonly" or "disabled" attributes')}return o?d(o,{container:r}):i?"cut"===n?u(i):d(i,{container:r}):void 0}({action:n,container:this.container,target:this.target(t),text:this.text(t)});this.emit(r?"success":"error",{action:n,text:r,trigger:t,clearSelection:function(){t&&t.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(e){return m("action",e)}},{key:"defaultTarget",value:function(e){var t=m("target",e);if(t)return document.querySelector(t)}},{key:"defaultText",value:function(e){return m("text",e)}},{key:"destroy",value:function(){this.listener.destroy()}}],r=[{key:"copy",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{container:document.body};return d(e,t)}},{key:"cut",value:function(e){return u(e)}},{key:"isSupported",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:["copy","cut"],t="string"==typeof e?[e]:e,n=!!document.queryCommandSupported;return t.forEach((function(e){n=n&&!!document.queryCommandSupported(e)})),n}}],n&&h(t.prototype,n),r&&h(t,r),l}(i()),x=g},828:function(e){if("undefined"!=typeof Element&&!Element.prototype.matches){var 
t=Element.prototype;t.matches=t.matchesSelector||t.mozMatchesSelector||t.msMatchesSelector||t.oMatchesSelector||t.webkitMatchesSelector}e.exports=function(e,t){for(;e&&9!==e.nodeType;){if("function"==typeof e.matches&&e.matches(t))return e;e=e.parentNode}}},438:function(e,t,n){var r=n(828);function i(e,t,n,r,i){var a=o.apply(this,arguments);return e.addEventListener(n,a,i),{destroy:function(){e.removeEventListener(n,a,i)}}}function o(e,t,n,i){return function(n){n.delegateTarget=r(n.target,t),n.delegateTarget&&i.call(e,n)}}e.exports=function(e,t,n,r,o){return"function"==typeof e.addEventListener?i.apply(null,arguments):"function"==typeof n?i.bind(null,document).apply(null,arguments):("string"==typeof e&&(e=document.querySelectorAll(e)),Array.prototype.map.call(e,(function(e){return i(e,t,n,r,o)})))}},879:function(e,t){t.node=function(e){return void 0!==e&&e instanceof HTMLElement&&1===e.nodeType},t.nodeList=function(e){var n=Object.prototype.toString.call(e);return void 0!==e&&("[object NodeList]"===n||"[object HTMLCollection]"===n)&&"length"in e&&(0===e.length||t.node(e[0]))},t.string=function(e){return"string"==typeof e||e instanceof String},t.fn=function(e){return"[object Function]"===Object.prototype.toString.call(e)}},370:function(e,t,n){var r=n(879),i=n(438);e.exports=function(e,t,n){if(!e&&!t&&!n)throw new Error("Missing required arguments");if(!r.string(t))throw new TypeError("Second argument must be a String");if(!r.fn(n))throw new TypeError("Third argument must be a Function");if(r.node(e))return function(e,t,n){return e.addEventListener(t,n),{destroy:function(){e.removeEventListener(t,n)}}}(e,t,n);if(r.nodeList(e))return function(e,t,n){return Array.prototype.forEach.call(e,(function(e){e.addEventListener(t,n)})),{destroy:function(){Array.prototype.forEach.call(e,(function(e){e.removeEventListener(t,n)}))}}}(e,t,n);if(r.string(e))return function(e,t,n){return i(document.body,e,t,n)}(e,t,n);throw new TypeError("First argument must be a String, HTMLElement, 
HTMLCollection, or NodeList")}},817:function(e){e.exports=function(e){var t;if("SELECT"===e.nodeName)e.focus(),t=e.value;else if("INPUT"===e.nodeName||"TEXTAREA"===e.nodeName){var n=e.hasAttribute("readonly");n||e.setAttribute("readonly",""),e.select(),e.setSelectionRange(0,e.value.length),n||e.removeAttribute("readonly"),t=e.value}else{e.hasAttribute("contenteditable")&&e.focus();var r=window.getSelection(),i=document.createRange();i.selectNodeContents(e),r.removeAllRanges(),r.addRange(i),t=r.toString()}return t}},279:function(e){function t(){}t.prototype={on:function(e,t,n){var r=this.e||(this.e={});return(r[e]||(r[e]=[])).push({fn:t,ctx:n}),this},once:function(e,t,n){var r=this;function i(){r.off(e,i),t.apply(n,arguments)}return i._=t,this.on(e,i,n)},emit:function(e){for(var t=[].slice.call(arguments,1),n=((this.e||(this.e={}))[e]||[]).slice(),r=0,i=n.length;rvoid 0!==e)),i.length?i.join(" ").trim():n}var i,o,a,s,l=!1,c=!1,u=[],f=-1;function d(e){let t=u.indexOf(e);-1!==t&&t>f&&u.splice(t,1)}function p(){l=!1,c=!0;for(let e=0;ey(e,t)));let n=!1;if(t(e,(()=>n=!0)),n)return;let r=e.firstElementChild;for(;r;)y(r,t),r=r.nextElementSibling}function m(e,...t){console.warn(`Alpine Warning: ${e}`,...t)}var g=!1,x=[],b=[];function w(){return x.map((e=>e()))}function E(){return x.concat(b).map((e=>e()))}function S(e){x.push(e)}function A(e){b.push(e)}function O(e,t=!1){return k(e,(e=>{if((t?E():w()).some((t=>e.matches(t))))return!0}))}function k(e,t){if(e){if(t(e))return e;if(e._x_teleportBack&&(e=e._x_teleportBack),e.parentElement)return k(e.parentElement,t)}}var j=[];function C(e,t=y,n=(()=>{})){!function(r){we=!0;let i=Symbol();Se=i,Ee.set(i,[]);let o=()=>{for(;Ee.get(i).length;)Ee.get(i).shift()();Ee.delete(i)};t(e,((e,t)=>{n(e,t),j.forEach((n=>n(e,t))),xe(e,e.attributes).forEach((e=>e())),e._x_ignore&&t()})),we=!1,o()}()}function T(e){y(e,(e=>{q(e),function(e){if(e._x_cleanups)for(;e._x_cleanups.length;)e._x_cleanups.pop()()}(e)}))}var L=[],$=[],M=[];function 
N(e,t){"function"==typeof t?(e._x_cleanups||(e._x_cleanups=[]),e._x_cleanups.push(t)):(t=e,$.push(t))}function P(e){L.push(e)}function R(e,t,n){e._x_attributeCleanups||(e._x_attributeCleanups={}),e._x_attributeCleanups[t]||(e._x_attributeCleanups[t]=[]),e._x_attributeCleanups[t].push(n)}function q(e,t){e._x_attributeCleanups&&Object.entries(e._x_attributeCleanups).forEach((([n,r])=>{(void 0===t||t.includes(n))&&(r.forEach((e=>e())),delete e._x_attributeCleanups[n])}))}var I=new MutationObserver(J),z=!1;function B(){I.observe(document,{subtree:!0,childList:!0,attributes:!0,attributeOldValue:!0}),z=!0}function D(){(F=F.concat(I.takeRecords())).length&&!H&&(H=!0,queueMicrotask((()=>{J(F),F.length=0,H=!1}))),I.disconnect(),z=!1}var F=[],H=!1;function W(e){if(!z)return e();D();let t=e();return B(),t}var V=!1,U=[];function J(e){if(V)return void(U=U.concat(e));let t=[],n=[],r=new Map,i=new Map;for(let o=0;o1===e.nodeType&&t.push(e))),e[o].removedNodes.forEach((e=>1===e.nodeType&&n.push(e)))),"attributes"===e[o].type)){let t=e[o].target,n=e[o].attributeName,a=e[o].oldValue,s=()=>{r.has(t)||r.set(t,[]),r.get(t).push({name:n,value:t.getAttribute(n)})},l=()=>{i.has(t)||i.set(t,[]),i.get(t).push(n)};t.hasAttribute(n)&&null===a?s():t.hasAttribute(n)?(l(),s()):l()}i.forEach(((e,t)=>{q(t,e)})),r.forEach(((e,t)=>{L.forEach((n=>n(t,e)))}));for(let e of n)t.includes(e)||($.forEach((t=>t(e))),T(e));t.forEach((e=>{e._x_ignoreSelf=!0,e._x_ignore=!0}));for(let e of t)n.includes(e)||e.isConnected&&(delete e._x_ignoreSelf,delete e._x_ignore,M.forEach((t=>t(e))),e._x_ignore=!0,e._x_ignoreSelf=!0);t.forEach((e=>{delete e._x_ignoreSelf,delete e._x_ignore})),t=null,n=null,r=null,i=null}function K(e){return Y(X(e))}function Z(e,t,n){return e._x_dataStack=[t,...X(n||e)],()=>{e._x_dataStack=e._x_dataStack.filter((e=>e!==t))}}function X(e){return e._x_dataStack?e._x_dataStack:"function"==typeof ShadowRoot&&e instanceof ShadowRoot?X(e.host):e.parentNode?X(e.parentNode):[]}function Y(e){return new 
Proxy({objects:e},G)}var G={ownKeys({objects:e}){return Array.from(new Set(e.flatMap((e=>Object.keys(e)))))},has({objects:e},t){return t!=Symbol.unscopables&&e.some((e=>Object.prototype.hasOwnProperty.call(e,t)))},get({objects:e},t,n){return"toJSON"==t?Q:Reflect.get(e.find((e=>Object.prototype.hasOwnProperty.call(e,t)))||{},t,n)},set({objects:e},t,n,r){const i=e.find((e=>Object.prototype.hasOwnProperty.call(e,t)))||e[e.length-1],o=Object.getOwnPropertyDescriptor(i,t);return o?.set&&o?.get?Reflect.set(i,t,n,r):Reflect.set(i,t,n)}};function Q(){return Reflect.ownKeys(this).reduce(((e,t)=>(e[t]=Reflect.get(this,t),e)),{})}function ee(e){let t=(n,r="")=>{Object.entries(Object.getOwnPropertyDescriptors(n)).forEach((([i,{value:o,enumerable:a}])=>{if(!1===a||void 0===o)return;let s=""===r?i:`${r}.${i}`;var l;"object"==typeof o&&null!==o&&o._x_interceptor?n[i]=o.initialize(e,s,i):"object"!=typeof(l=o)||Array.isArray(l)||null===l||o===n||o instanceof Element||t(o,s)}))};return t(e)}function te(e,t=(()=>{})){let n={initialValue:void 0,_x_interceptor:!0,initialize(t,n,r){return e(this.initialValue,(()=>function(e,t){return t.split(".").reduce(((e,t)=>e[t]),e)}(t,n)),(e=>ne(t,n,e)),n,r)}};return t(n),e=>{if("object"==typeof e&&null!==e&&e._x_interceptor){let t=n.initialize.bind(n);n.initialize=(r,i,o)=>{let a=e.initialize(r,i,o);return n.initialValue=a,t(r,i,o)}}else n.initialValue=e;return n}}function ne(e,t,n){if("string"==typeof t&&(t=t.split(".")),1!==t.length){if(0===t.length)throw error;return e[t[0]]||(e[t[0]]={}),ne(e[t[0]],t.slice(1),n)}e[t[0]]=n}var re={};function ie(e,t){re[e]=t}function oe(e,t){return Object.entries(re).forEach((([n,r])=>{let i=null;Object.defineProperty(e,`$${n}`,{get(){return r(t,function(){if(i)return i;{let[e,n]=Ae(t);return i={interceptor:te,...e},N(t,n),i}}())},enumerable:!1})})),e}function ae(e,t,n,...r){try{return n(...r)}catch(n){se(n,e,t)}}function se(e,t,n=void 0){Object.assign(e,{el:t,expression:n}),console.warn(`Alpine Expression 
Error: ${e.message}\n\n${n?'Expression: "'+n+'"\n\n':""}`,t),setTimeout((()=>{throw e}),0)}var le=!0;function ce(e){let t=le;le=!1;let n=e();return le=t,n}function ue(e,t,n={}){let r;return fe(e,t)((e=>r=e),n),r}function fe(...e){return de(...e)}var de=pe;function pe(e,t){let n={};oe(n,e);let r=[n,...X(e)],i="function"==typeof t?function(e,t){return(n=(()=>{}),{scope:r={},params:i=[]}={})=>{he(n,t.apply(Y([r,...e]),i))}}(r,t):function(e,t,n){let r=function(e,t){if(_e[e])return _e[e];let n=Object.getPrototypeOf((async function(){})).constructor,r=/^[\n\s]*if.*\(.*\)/.test(e.trim())||/^(let|const)\s/.test(e.trim())?`(async()=>{ ${e} })()`:e;let i=(()=>{try{let t=new n(["__self","scope"],`with (scope) { __self.result = ${r} }; __self.finished = true; return __self.result;`);return Object.defineProperty(t,"name",{value:`[Alpine] ${e}`}),t}catch(n){return se(n,t,e),Promise.resolve()}})();return _e[e]=i,i}(t,n);return(i=(()=>{}),{scope:o={},params:a=[]}={})=>{r.result=void 0,r.finished=!1;let s=Y([o,...e]);if("function"==typeof r){let e=r(r,s).catch((e=>se(e,n,t)));r.finished?(he(i,r.result,s,a,n),r.result=void 0):e.then((e=>{he(i,e,s,a,n)})).catch((e=>se(e,n,t))).finally((()=>r.result=void 0))}}}(r,t,e);return ae.bind(null,e,t,i)}var _e={};function he(e,t,n,r,i){if(le&&"function"==typeof t){let o=t.apply(n,r);o instanceof Promise?o.then((t=>he(e,t,n,r))).catch((e=>se(e,i,t))):e(o)}else"object"==typeof t&&t instanceof Promise?t.then((t=>e(t))):e(t)}var ve="x-";function ye(e=""){return ve+e}var me={};function ge(e,t){return me[e]=t,{before(t){if(!me[t])return void console.warn("Cannot find directive `${directive}`. 
`${name}` will use the default order of execution");const n=Me.indexOf(t);Me.splice(n>=0?n:Me.indexOf("DEFAULT"),0,e)}}}function xe(e,t,n){if(t=Array.from(t),e._x_virtualDirectives){let n=Object.entries(e._x_virtualDirectives).map((([e,t])=>({name:e,value:t}))),r=be(n);n=n.map((e=>r.find((t=>t.name===e.name))?{name:`x-bind:${e.name}`,value:`"${e.value}"`}:e)),t=t.concat(n)}let r={},i=t.map(ke(((e,t)=>r[e]=t))).filter(Te).map(function(e,t){return({name:n,value:r})=>{let i=n.match(Le()),o=n.match(/:([a-zA-Z0-9\-_:]+)/),a=n.match(/\.[^.\]]+(?=[^\]]*$)/g)||[],s=t||e[n]||n;return{type:i?i[1]:null,value:o?o[1]:null,modifiers:a.map((e=>e.replace(".",""))),expression:r,original:s}}}(r,n)).sort(Ne);return i.map((t=>function(e,t){let n=me[t.type]||(()=>{}),[r,i]=Ae(e);R(e,t.original,i);let o=()=>{e._x_ignore||e._x_ignoreSelf||(n.inline&&n.inline(e,t,r),n=n.bind(n,e,t,r),we?Ee.get(Se).push(n):n())};return o.runCleanups=i,o}(e,t)))}function be(e){return Array.from(e).map(ke()).filter((e=>!Te(e)))}var we=!1,Ee=new Map,Se=Symbol();function Ae(e){let t=[],[n,r]=function(e){let t=()=>{};return[n=>{let r=o(n);return e._x_effects||(e._x_effects=new Set,e._x_runEffects=()=>{e._x_effects.forEach((e=>e()))}),e._x_effects.add(r),t=()=>{void 0!==r&&(e._x_effects.delete(r),a(r))},r},()=>{t()}]}(e);return t.push(r),[{Alpine:_t,effect:n,cleanup:e=>t.push(e),evaluateLater:fe.bind(fe,e),evaluate:ue.bind(ue,e)},()=>t.forEach((e=>e()))]}var Oe=(e,t)=>({name:n,value:r})=>(n.startsWith(e)&&(n=n.replace(e,t)),{name:n,value:r});function ke(e=(()=>{})){return({name:t,value:n})=>{let{name:r,value:i}=je.reduce(((e,t)=>t(e)),{name:t,value:n});return r!==t&&e(r,t),{name:r,value:i}}}var je=[];function Ce(e){je.push(e)}function Te({name:e}){return Le().test(e)}var Le=()=>new RegExp(`^${ve}([^:^.]+)\\b`),$e="DEFAULT",Me=["ignore","ref","data","id","anchor","bind","init","for","model","modelable","transition","show","if",$e,"teleport"];function Ne(e,t){let 
n=-1===Me.indexOf(e.type)?$e:e.type,r=-1===Me.indexOf(t.type)?$e:t.type;return Me.indexOf(n)-Me.indexOf(r)}var Pe=[],Re=!1;function qe(e=(()=>{})){return queueMicrotask((()=>{Re||setTimeout((()=>{Ie()}))})),new Promise((t=>{Pe.push((()=>{e(),t()}))}))}function Ie(){for(Re=!1;Pe.length;)Pe.shift()()}function ze(e,t){return Array.isArray(t)?Be(e,t.join(" ")):"object"==typeof t&&null!==t?function(e,t){let n=e=>e.split(" ").filter(Boolean),r=Object.entries(t).flatMap((([e,t])=>!!t&&n(e))).filter(Boolean),i=Object.entries(t).flatMap((([e,t])=>!t&&n(e))).filter(Boolean),o=[],a=[];return i.forEach((t=>{e.classList.contains(t)&&(e.classList.remove(t),a.push(t))})),r.forEach((t=>{e.classList.contains(t)||(e.classList.add(t),o.push(t))})),()=>{a.forEach((t=>e.classList.add(t))),o.forEach((t=>e.classList.remove(t)))}}(e,t):"function"==typeof t?ze(e,t()):Be(e,t)}function Be(e,t){return t=!0===t?t="":t||"",n=t.split(" ").filter((t=>!e.classList.contains(t))).filter(Boolean),e.classList.add(...n),()=>{e.classList.remove(...n)};var n}function De(e,t){return"object"==typeof t&&null!==t?function(e,t){let n={};return Object.entries(t).forEach((([t,r])=>{n[t]=e.style[t],t.startsWith("--")||(t=t.replace(/([a-z])([A-Z])/g,"$1-$2").toLowerCase()),e.style.setProperty(t,r)})),setTimeout((()=>{0===e.style.length&&e.removeAttribute("style")})),()=>{De(e,n)}}(e,t):function(e,t){let n=e.getAttribute("style",t);return e.setAttribute("style",t),()=>{e.setAttribute("style",n||"")}}(e,t)}function Fe(e,t=(()=>{})){let n=!1;return function(){n?t.apply(this,arguments):(n=!0,e.apply(this,arguments))}}function He(e,t,n={}){e._x_transition||(e._x_transition={enter:{during:n,start:n,end:n},leave:{during:n,start:n,end:n},in(n=(()=>{}),r=(()=>{})){Ve(e,t,{during:this.enter.during,start:this.enter.start,end:this.enter.end},n,r)},out(n=(()=>{}),r=(()=>{})){Ve(e,t,{during:this.leave.during,start:this.leave.start,end:this.leave.end},n,r)}})}function We(e){let t=e.parentNode;if(t)return 
t._x_hidePromise?t:We(t)}function Ve(e,t,{during:n,start:r,end:i}={},o=(()=>{}),a=(()=>{})){if(e._x_transitioning&&e._x_transitioning.cancel(),0===Object.keys(n).length&&0===Object.keys(r).length&&0===Object.keys(i).length)return o(),void a();let s,l,c;!function(e,t){let n,r,i,o=Fe((()=>{W((()=>{n=!0,r||t.before(),i||(t.end(),Ie()),t.after(),e.isConnected&&t.cleanup(),delete e._x_transitioning}))}));e._x_transitioning={beforeCancels:[],beforeCancel(e){this.beforeCancels.push(e)},cancel:Fe((function(){for(;this.beforeCancels.length;)this.beforeCancels.shift()();o()})),finish:o},W((()=>{t.start(),t.during()})),Re=!0,requestAnimationFrame((()=>{if(n)return;let o=1e3*Number(getComputedStyle(e).transitionDuration.replace(/,.*/,"").replace("s","")),a=1e3*Number(getComputedStyle(e).transitionDelay.replace(/,.*/,"").replace("s",""));0===o&&(o=1e3*Number(getComputedStyle(e).animationDuration.replace("s",""))),W((()=>{t.before()})),r=!0,requestAnimationFrame((()=>{n||(W((()=>{t.end()})),Ie(),setTimeout(e._x_transitioning.finish,o+a),i=!0)}))}))}(e,{start(){s=t(e,r)},during(){l=t(e,n)},before:o,end(){s(),c=t(e,i)},after:a,cleanup(){l(),c()}})}function Ue(e,t,n){if(-1===e.indexOf(t))return n;const r=e[e.indexOf(t)+1];if(!r)return n;if("scale"===t&&isNaN(r))return n;if("duration"===t||"delay"===t){let e=r.match(/([0-9]+)ms/);if(e)return e[1]}return"origin"===t&&["top","right","left","center","bottom"].includes(e[e.indexOf(t)+2])?[r,e[e.indexOf(t)+2]].join(" "):r}ge("transition",((e,{value:t,modifiers:n,expression:r},{evaluate:i})=>{"function"==typeof r&&(r=i(r)),!1!==r&&(r&&"boolean"!=typeof r?function(e,t,n){He(e,ze,""),{enter:t=>{e._x_transition.enter.during=t},"enter-start":t=>{e._x_transition.enter.start=t},"enter-end":t=>{e._x_transition.enter.end=t},leave:t=>{e._x_transition.leave.during=t},"leave-start":t=>{e._x_transition.leave.start=t},"leave-end":t=>{e._x_transition.leave.end=t}}[n](t)}(e,r,t):function(e,t,n){He(e,De);let 
r=!t.includes("in")&&!t.includes("out")&&!n,i=r||t.includes("in")||["enter"].includes(n),o=r||t.includes("out")||["leave"].includes(n);t.includes("in")&&!r&&(t=t.filter(((e,n)=>nn>t.indexOf("out"))));let a=!t.includes("opacity")&&!t.includes("scale"),s=a||t.includes("opacity")?0:1,l=a||t.includes("scale")?Ue(t,"scale",95)/100:1,c=Ue(t,"delay",0)/1e3,u=Ue(t,"origin","center"),f="opacity, transform",d=Ue(t,"duration",150)/1e3,p=Ue(t,"duration",75)/1e3,_="cubic-bezier(0.4, 0.0, 0.2, 1)";i&&(e._x_transition.enter.during={transformOrigin:u,transitionDelay:`${c}s`,transitionProperty:f,transitionDuration:`${d}s`,transitionTimingFunction:_},e._x_transition.enter.start={opacity:s,transform:`scale(${l})`},e._x_transition.enter.end={opacity:1,transform:"scale(1)"}),o&&(e._x_transition.leave.during={transformOrigin:u,transitionDelay:`${c}s`,transitionProperty:f,transitionDuration:`${p}s`,transitionTimingFunction:_},e._x_transition.leave.start={opacity:1,transform:"scale(1)"},e._x_transition.leave.end={opacity:s,transform:`scale(${l})`})}(e,n,t))})),window.Element.prototype._x_toggleAndCascadeWithTransitions=function(e,t,n,r){const i="visible"===document.visibilityState?requestAnimationFrame:setTimeout;let o=()=>i(n);t?e._x_transition&&(e._x_transition.enter||e._x_transition.leave)?e._x_transition.enter&&(Object.entries(e._x_transition.enter.during).length||Object.entries(e._x_transition.enter.start).length||Object.entries(e._x_transition.enter.end).length)?e._x_transition.in(n):o():e._x_transition?e._x_transition.in(n):o():(e._x_hidePromise=e._x_transition?new Promise(((t,n)=>{e._x_transition.out((()=>{}),(()=>t(r))),e._x_transitioning&&e._x_transitioning.beforeCancel((()=>n({isFromCancelledTransition:!0})))})):Promise.resolve(r),queueMicrotask((()=>{let t=We(e);t?(t._x_hideChildren||(t._x_hideChildren=[]),t._x_hideChildren.push(e)):i((()=>{let t=e=>{let n=Promise.all([e._x_hidePromise,...(e._x_hideChildren||[]).map(t)]).then((([e])=>e()));return delete e._x_hidePromise,delete 
e._x_hideChildren,n};t(e).catch((e=>{if(!e.isFromCancelledTransition)throw e}))}))})))};var Je=!1;function Ke(e,t=(()=>{})){return(...n)=>Je?t(...n):e(...n)}var Ze=[];function Xe(e){Ze.push(e)}var Ye=!1;function Ge(e){let t=o;h(((e,n)=>{let r=t(e);return a(r),()=>{}})),e(),h(t)}function Qe(e,t,n,r=[]){switch(e._x_bindings||(e._x_bindings=i({})),e._x_bindings[t]=n,t=r.includes("camel")?t.toLowerCase().replace(/-(\w)/g,((e,t)=>t.toUpperCase())):t){case"value":!function(e,t){if("radio"===e.type)void 0===e.attributes.value&&(e.value=t),window.fromModel&&(e.checked="boolean"==typeof t?nt(e.value)===t:tt(e.value,t));else if("checkbox"===e.type)Number.isInteger(t)?e.value=t:Array.isArray(t)||"boolean"==typeof t||[null,void 0].includes(t)?Array.isArray(t)?e.checked=t.some((t=>tt(t,e.value))):e.checked=!!t:e.value=String(t);else if("SELECT"===e.tagName)!function(e,t){const n=[].concat(t).map((e=>e+""));Array.from(e.options).forEach((e=>{e.selected=n.includes(e.value)}))}(e,t);else{if(e.value===t)return;e.value=void 0===t?"":t}}(e,n);break;case"style":!function(e,t){e._x_undoAddedStyles&&e._x_undoAddedStyles(),e._x_undoAddedStyles=De(e,t)}(e,n);break;case"class":!function(e,t){e._x_undoAddedClasses&&e._x_undoAddedClasses(),e._x_undoAddedClasses=ze(e,t)}(e,n);break;case"selected":case"checked":!function(e,t,n){et(e,t,n),function(e,t,n){e[t]!==n&&(e[t]=n)}(e,t,n)}(e,t,n);break;default:et(e,t,n)}}function et(e,t,n){[null,void 0,!1].includes(n)&&function(e){return!["aria-pressed","aria-checked","aria-expanded","aria-selected"].includes(e)}(t)?e.removeAttribute(t):(rt(t)&&(n=t),function(e,t,n){e.getAttribute(t)!=n&&e.setAttribute(t,n)}(e,t,n))}function tt(e,t){return e==t}function nt(e){return!![1,"1","true","on","yes",!0].includes(e)||![0,"0","false","off","no",!1].includes(e)&&(e?Boolean(e):null)}function 
rt(e){return["disabled","checked","required","readonly","hidden","open","selected","autofocus","itemscope","multiple","novalidate","allowfullscreen","allowpaymentrequest","formnovalidate","autoplay","controls","loop","muted","playsinline","default","ismap","reversed","async","defer","nomodule"].includes(e)}function it(e,t,n){let r=e.getAttribute(t);return null===r?"function"==typeof n?n():n:""===r||(rt(t)?!![t,"true"].includes(r):r)}function ot(e,t){var n;return function(){var r=this,i=arguments;clearTimeout(n),n=setTimeout((function(){n=null,e.apply(r,i)}),t)}}function at(e,t){let n;return function(){let r=arguments;n||(e.apply(this,r),n=!0,setTimeout((()=>n=!1),t))}}function st({get:e,set:t},{get:n,set:r}){let i,s=!0,l=o((()=>{const o=e(),a=n();if(s)r(lt(o)),s=!1,i=JSON.stringify(o);else{const e=JSON.stringify(o);e!==i?(r(lt(o)),i=e):(t(lt(a)),i=JSON.stringify(a))}JSON.stringify(n()),JSON.stringify(e())}));return()=>{a(l)}}function lt(e){return"object"==typeof e?JSON.parse(JSON.stringify(e)):e}var ct={},ut=!1,ft={};function dt(e,t,n){let r=[];for(;r.length;)r.pop()();let i=Object.entries(t).map((([e,t])=>({name:e,value:t}))),o=be(i);return i=i.map((e=>o.find((t=>t.name===e.name))?{name:`x-bind:${e.name}`,value:`"${e.value}"`}:e)),xe(e,i,n).map((e=>{r.push(e.runCleanups),e()})),()=>{for(;r.length;)r.pop()()}}var pt={},_t={get reactive(){return i},get release(){return a},get effect(){return o},get raw(){return s},version:"3.13.3",flushAndStopDeferringMutations:function(){V=!1,J(U),U=[]},dontAutoEvaluateFunctions:ce,disableEffectScheduling:function(e){_=!1,e(),_=!0},startObservingMutations:B,stopObservingMutations:D,setReactivityEngine:function(e){i=e.reactive,a=e.release,o=t=>e.effect(t,{scheduler:e=>{_?function(e){var 
t;t=e,u.includes(t)||u.push(t),c||l||(l=!0,queueMicrotask(p))}(e):e()}}),s=e.raw},onAttributeRemoved:R,onAttributesAdded:P,closestDataStack:X,skipDuringClone:Ke,onlyDuringClone:function(e){return(...t)=>Je&&e(...t)},addRootSelector:S,addInitSelector:A,interceptClone:Xe,addScopeToNode:Z,deferMutations:function(){V=!0},mapAttributes:Ce,evaluateLater:fe,interceptInit:function(e){j.push(e)},setEvaluator:function(e){de=e},mergeProxies:Y,extractProp:function(e,t,n,r=!0){if(e._x_bindings&&void 0!==e._x_bindings[t])return e._x_bindings[t];if(e._x_inlineBindings&&void 0!==e._x_inlineBindings[t]){let n=e._x_inlineBindings[t];return n.extract=r,ce((()=>ue(e,n.expression)))}return it(e,t,n)},findClosest:k,onElRemoved:N,closestRoot:O,destroyTree:T,interceptor:te,transition:Ve,setStyles:De,mutateDom:W,directive:ge,entangle:st,throttle:at,debounce:ot,evaluate:ue,initTree:C,nextTick:qe,prefixed:ye,prefix:function(e){ve=e},plugin:function(e){(Array.isArray(e)?e:[e]).forEach((e=>e(_t)))},magic:ie,store:function(e,t){if(ut||(ct=i(ct),ut=!0),void 0===t)return ct[e];ct[e]=t,"object"==typeof t&&null!==t&&t.hasOwnProperty("init")&&"function"==typeof t.init&&ct[e].init(),ee(ct[e])},start:function(){var e;g&&m("Alpine has already been initialized on this page. Calling Alpine.start() more than once can cause problems."),g=!0,document.body||m("Unable to initialize. Trying to load Alpine before `` is available. Did you forget to add `defer` in Alpine's ` + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Citation

+

To reference the 🤖🌊 aiFlows library, for now, please cite the paper Flows: Building Blocks of Reasoning and Collaborating AI:

+
@misc{josifoski2023flows,
+      title={Flows: Building Blocks of Reasoning and Collaborating AI},
+      author={Martin Josifoski and Lars Klein and Maxime Peyrard and Yifei Li and Saibo Geng and Julian Paul Schnitzler and Yuxing Yao and Jiheng Wei and Debjit Paul and Robert West},
+      year={2023},
+      eprint={2308.01285},
+      archivePrefix={arXiv},
+      primaryClass={cs.AI}
+}
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/automating_documentation_flow.html b/docs/built_with_sphinx/html/contributing_info/automating_documentation_flow.html new file mode 100644 index 0000000..d101ef9 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/automating_documentation_flow.html @@ -0,0 +1,197 @@ + + + + + + + + + +Automating the documentation of a Flow on the FlowVerse | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Automating the documentation of a Flow on the FlowVerse

+

Documenting your Flow is a crucial step in ensuring clarity and accessibility. Let’s explore an efficient way to automate this process using pydoc-markdown.

+
+

1. Document Your Flow in Sphinx Format

+

Start by documenting your Flow in Sphinx format. Need a reference? Check out ChatFlowModule for inspiration.

+
+

Pro tip: Leverage VSCode’s GitHub Copilot to expedite the documentation process.

+
+
+
+

2. Install pydoc-markdown

+

Ensure you have the necessary tool installed by running the following command:

+
pip install pydoc-markdown
+
+
+
+ +
+

4. Build the Markdown

+

Generate the Markdown documentation using pydoc-markdown. Replace <YOUR-FLOW> with the name of your Flow file (excluding the .py extension). +For example, if your Flow file is named Flow1.py, execute the following command:

+
pydoc-markdown  -p Flow1 --render-toc > README.md
+
+
+

If you have multiple Flow files, consider using the following command to include all files in the documentation:

+
pydoc-markdown  -I . --render-toc > README.md
+
+
+
+

This process automates the generation of Markdown documentation for your Flow, streamlining the contribution process on the FlowVerse. Happy documenting! 🚀✨

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/coding_standards.html b/docs/built_with_sphinx/html/contributing_info/coding_standards.html new file mode 100644 index 0000000..c2b306a --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/coding_standards.html @@ -0,0 +1,215 @@ + + + + + + + + + +Coding Standards | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Coding Standards

+

When contributing to the aiFlows library, it’s essential to adhere to the following coding standards to maintain consistency, readability, and the overall quality of the codebase:

+
+

1. Simplicity and Readability

+

Strive to make your code as simple and readable as possible. Use clear and meaningful variable/function names, and avoid unnecessary complexity.

+
+
+

2. Best Practices

+

Follow industry best practices when implementing features or fixing bugs. This includes adhering to language-specific conventions and guidelines.

+
+
+

3. Documentation

+

Document your code thoroughly. Provide comments where necessary to explain complex logic or algorithms. Use clear and concise language to describe your thought process.

+
+
+

4. Docstrings in Sphinx Format

+

For all new functions and classes, include docstrings in Sphinx format. These docstrings should describe the purpose, parameters, return values, and possibly exceptions raised by the function or class. Here is an example of the docstring of a function in the Sphinx format:

+
def example_function(param1, param2):
+    """
+    Brief description of the function.
+
+    :param param1: Description of the first parameter.
+    :param param2: Description of the second parameter.
+    :return: Description of the return value.
+    :raises CustomException: Description of when this exception is raised.
+    """
+    # Function implementation
+    return result
+
+
+

For more details on the Sphinx docstring format check out this link: Sphinx Docstring Format.

+
+
+

5. Backward Compatibility

+

Ensure that your code changes are backward compatible whenever possible. This helps maintain the stability of the library for existing users.

+
+
+

6. Thorough Testing

+

Create comprehensive tests for your code. Tests should cover various scenarios, including edge cases, to ensure the robustness of your implementation.

+
+
+

7. Test Coverage

+

Try to maintain or increase test coverage when adding new features or modifying existing ones. Aim for a high percentage of code coverage to catch potential issues early.

+
+
+

8. Feature Tests

+

When introducing new features, include corresponding tests. Every feature should have a test, and existing tests should be updated as needed.

+
+

Your dedication to simplicity, readability, and best practices is greatly appreciated. Your contributions help make the aiFlows library more accessible, robust, and user-friendly for the entire community.

+

Once again, thank you for being a valued member of our community and for your commitment to making aiFlows even better. Happy coding! 🚀⭐

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/contribute_index.html b/docs/built_with_sphinx/html/contributing_info/contribute_index.html new file mode 100644 index 0000000..5b49ba6 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/contribute_index.html @@ -0,0 +1,255 @@ + + + + + + + + + +Contribution Guide | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Contribution Guide

+

This guide provides information on how to contribute to aiFlows. Whether you’re interested in coding, documentation, collaboration, fixing bugs or adding features, you’ll find useful resources here. +If you know what you’re looking for, use the table of contents to jump right in. If you’re new to aiFlows, start with the Preface section below.

+
+

Table of Contents

+ +
+
+

Preface

+
+

Our goal is to make Flows a community-driven project that will benefit researchers and developers alike (see the Why should I use aiFlows? ) and to achieve this goal, we need your help.

+

You can become a part of the project in a few ways:

+
    +
  • contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it

  • +
  • contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others’ work

  • +
  • use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions, as well as important missing features, is by experimenting

  • +
  • last but not least, star the repository and shout out aiFlows with your friends and colleagues; spread the word with love

  • +
+

We will support the community in the best way we can but also lead by example. In the coming weeks, we will share:

+
    +
  • a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. – feedback and help would be greatly appreciated!)

  • +
  • write-ups outlining features, ideas, and our long-term vision for Flows – we encourage you to pick up any of these and start working on them in whatever way you see fit

  • +
  • a version of JARVIS – your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us, and JARVIS will become one of those useful projects that will constantly push the boundaries of what’s possible with Flows

  • +
+

We have tried to find a way for anyone to benefit by contributing to the project. Below we describe the envisioned workflows in more detail (we would love to hear your feedback on this – the Discord server already has a channel for it :)).

+

In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let’s work on a great (open-source) AI future together!

+
+
+
+

Want To Contribute to aiFlows?

+
+

Connecting With Like-Minded Contributors & How To Get Help ?

+
+

Explore the Looking for Collaborators ? section for resources, tips, and guidance on connecting with potential collaborators, sharing project ideas, building your dream team or how to get help. 🚀🌟

+
+
+

Contributing To aiFlows Library: Bug Fixes and Feature Additions Guide

+
+

Contribute to the aiFlows Library! Follow the guidelines in the Contributing to aiFlows Library (for bug fixes and adding features) guide for bug fixes and feature additions. +Report issues on GitHub, discuss on Discord, and create pull requests. Your contributions matter! 🚀🌟

+
+
+
+

Contributing To the FlowVerse: Creating New Flows and Contributing To Existing Flows Guide

+
+

Contribute to the Flows from the FlowVerse! Follow the guidelines in the Recommended Workflow for Contributing to a Flow on the FlowVerse guide to understand how to create and publish your Flow or contribute to an existing one. 🚀

+
+
+
+
+

Automating the Generation of FlowCards (README) for the FlowVerse

+
+

Check out the Automating the documentation of a Flow on the FlowVerse guide to speed up the process of creating FlowCards (READMEs) for the FlowVerse.

+
+
+
+

Coding Standards for aiFlows

+
+

Review the coding standards for aiFlows Library contributions in the Coding Standards guide. Essential guidelines ensuring a high-quality codebase. +Familiarize yourself with these standards before submitting your Pull Request. 🚀⭐

+
+
+
+

Contributors Wall and Sharing/Publicizing Your aiFlows Project or Flow

+
+

Learn about contributor recognition, sharing work on Discord, and the importance of acknowledgment for aiFlows library contributions in the Publicizing Your Work guide. Happy contributing! 🚀🌐

+
+
+
+

Licence Information (Spoiler Alert: It’s Open-Source and Completely Free!)

+
+

Check out Licence Info: Frequently Asked Questions to get quick answers about aiFlows’ open-source MIT License, its free-of-charge accessibility, and how you can use it in commercial projects. Explore how you can contribute to the thriving aiFlows community without any worries about the legal stuff. 🚀🌟

+
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/contributing_to_FlowVerse.html b/docs/built_with_sphinx/html/contributing_info/contributing_to_FlowVerse.html new file mode 100644 index 0000000..67c665e --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/contributing_to_FlowVerse.html @@ -0,0 +1,213 @@ + + + + + + + + + +Recommended Workflow for Contributing to a Flow on the FlowVerse | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+ +
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/contributing_to_aiFlows.html b/docs/built_with_sphinx/html/contributing_info/contributing_to_aiFlows.html new file mode 100644 index 0000000..4d9f650 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/contributing_to_aiFlows.html @@ -0,0 +1,281 @@ + + + + + + + + + +Contributing to aiFlows Library (for bug fixes and adding features) | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Contributing to aiFlows Library (for bug fixes and adding features)

+
+

Step 1: Identifying and Reporting an Issue / Bug

+
+

1.1. Check Existing Issues & Talk to the Community

+

Before creating a new issue, check if the problem you’ve encountered already exists. If it does, consider commenting on the existing issue to +provide additional details or express your interest in working on it.

+
+

Community Discussion on Discord:

+

Additionally, for more immediate interaction and collaboration, you can discuss the issue on the project’s Discord channel. +Join the 💻│developers or 🐛│debugging channels to connect with the community, seek advice, and coordinate efforts. Engaging with the +community on Discord can provide valuable insights and assistance throughout the issue resolution process.

+
+
+
+

1.2. Creating a New Issue

+

If the issue doesn’t exist, create a new one. Include a clear and concise title, detailed description of the problem, and steps to reproduce it. +Utilize the “Report a Bug” template for bug reports and the “Feature Request” template for suggesting new features.

+
+
+
+

Step 2: Getting Started with a Pull Request (PR)

+
+

2.0. Inform the Community

+

Comment on the issue you’re working on, informing others that you’re actively working on a solution. +Provide progress updates if needed. Also, inform the community on our Discord 🔨│community-projects forum that you’re working on it. +Engage with the community, share your ideas, and seek feedback on your pull request. This open communication is crucial not only for +collaboration but also to inform others that you’re actively working on the issue. This helps prevent duplicate work and ensures that community members are aware of ongoing efforts, +fostering a collaborative and efficient development environment.

+
+
+

2.1. Fork the Repository

+

On the “aiflows” GitHub page, click “Fork” to create a copy of the repository under your GitHub account.

+
+
+

2.2. Clone Your Fork

+

Clone the forked repository to your local machine using the following command:

+
git clone https://github.com/your-username/aiflows.git
+
+
+
+
+

2.3. Create a New Branch

+

Create a new branch for your fix or feature:

+
git checkout -b fix-branch
+
+
+
+
+
+

Step 3: Coding and Making a Pull Request

+
+

3.1 Make Changes & Adhere to aiFlows’ coding practices

+

Implement your fix or feature. Follow best practices, and consider the project’s Coding Standards.

+
+
+

3.2. Commit Changes

+

Commit your changes with clear and descriptive messages:

+
git add .
+git commit -m "Fix: Describe the issue or feature"
+
+
+
+
+

3.3. Push Changes

+

Push your changes to your forked repository:

+
git push origin fix-branch
+
+
+
+
+

3.4. Create a Pull Request

+

On the GitHub page of your fork, create a new pull request. Ensure you select the appropriate branch in the “base” and “compare” dropdowns. +Make sure to check out this Github tutorial for more details: Creating a pull request from a fork.

+
+ +
+
+

Step 4: Addressing Reviewer Concerns

+
+

4.1. Reviewer Feedback

+

Reviewers may suggest changes to your code. Be open to feedback and make necessary adjustments.

+
+
+

4.2. Coding Style

+

Ensure your code aligns with the project’s coding style. If unsure, refer to the project’s documentation or ask for clarification.

+
+

Thank you for considering contributing to the aiFlows library! Your dedication and effort are immensely appreciated. +Contributors like you make a significant impact, and we want to express our gratitude. +Remember, your name will proudly appear on our contributors’ wall, showcasing your valuable contributions to the aiFlows project 🚀🔥

+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/finding_collaborators.html b/docs/built_with_sphinx/html/contributing_info/finding_collaborators.html new file mode 100644 index 0000000..015b47e --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/finding_collaborators.html @@ -0,0 +1,199 @@ + + + + + + + + + +Looking for Collaborators ? | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Looking for Collaborators ?

+

🤝 Seeking Collaborators? If you’re on the lookout for a collaborator to tackle an issue or work on a feature, head over to the 👥│flows-friends forum on Discord. +Share your project ideas, highlight your skills, or specify areas where you could use assistance. For more targeted searches, consider posting in specialized channels, +such as the 🔬│research channel if you’re seeking a researcher. Your dream team may just be a click away. Let the collaboration begin! 🚀

+
+

Looking for Collaborators - FAQ

+
+

1. I’m Encountering Issues With Debugging. How Can the Community Help?

+
+

🕵️ If you’re in need of debugging support, head over to the 🐛│debugging channel on Discord. +Engaging with the community there can provide valuable insights and assistance in resolving your issues.

+
+
+
+

2. Where Can I Get Feedback on My Work?

+
+

📣 For feedback on your work, visit the appropriate Discord channel based on your project or focus. +For FlowVerse-related projects, check out channels like Flow-sharing or Flow-verse. If you’re involved in research, head to the 🔬│research channel. General +development queries can be directed to the developers channel. Community-projects are also a great space for feedback.

+
+
+
+

3. I’m Looking To Brainstorm Ideas. Where Can I Discuss Them With the Community?

+
+

💡 For brainstorming sessions, consider discussing your ideas in channels like Flows Ideas, Flow-verse, or Developers on Discord. +Engaging with the community in these spaces can lead to fruitful discussions and valuable input on your concepts.

+
+
+ +
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/index.html b/docs/built_with_sphinx/html/contributing_info/index.html new file mode 100644 index 0000000..c65deb0 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/index.html @@ -0,0 +1,172 @@ + + + + + + + + + +Contribute | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Contribute

+

Our mission is to make this a community-driven project that will benefit researchers and developers alike +(see the Why should I use aiFlows? ) and to achieve this goal, we need your help.

+

You can become a part of the project in a few ways:

+
    +
  • contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it

  • +
  • contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others’ work

  • +
  • use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions,

  • +
+

as well as important missing features, is by experimenting +- last but not least, ⭐ the repository and 📣 share aiFlows with your friends and colleagues; spread the word ❤️

+

We will support the community in the best way we can but also lead by example. In the coming weeks, we will share:

+
    +
  • a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. – feedback and help would be greatly appreciated!)

  • +
  • write-ups outlining features, ideas, and our long-term vision for Flows – we encourage you to pick up any of these and start working on them in whatever way you see fit

  • +
  • a version of JARVIS – your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us,

  • +
+

and JARVIS will become one of those useful projects that will constantly push the boundaries of what’s possible with Flows

+

We have tried to find a way for anyone to benefit by contributing to the project. The Contribution Guide describes our envisioned workflow and how you could get +involved in more detail (we would love to hear your feedback on it – the Discord server already has a channel for it :).

+

In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let’s work on a great (open-source) AI future together!

+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/license_info.html b/docs/built_with_sphinx/html/contributing_info/license_info.html new file mode 100644 index 0000000..faa1bb8 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/license_info.html @@ -0,0 +1,190 @@ + + + + + + + + + +Licence Info: Frequently Asked Questions | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Licence Info: Frequently Asked Questions

+
+

1. I’m Worried About License Issues. Is aiFlows Open-Source?

+

Absolutely! aiFlows is proudly open-source, and it operates under the MIT License.

+

MIT License: The MIT License is a permissive open-source license that grants you the freedom to use, modify, and distribute aiFlows without any restrictions. +It encourages collaboration and community contribution.

+
+
+

2. Is There Any Catch? Do I Have To Pay Anything?

+

Not at all! aiFlows is free to use, and there’s no need to worry about hidden fees. +It’s a library designed to make development, research, and the creation of structured interactions seamless and accessible.

+
+
+

3. Can I Use aiFlows in Commercial Projects?

+

Yes, you can! The MIT License allows you to use aiFlows in both open-source and commercial projects. +Feel free to incorporate aiFlows into your endeavors, whether they are for research, development, or commercial applications.

+
+
+

4. Are There Any Restrictions on How I Can Use aiFlows?

+

Nope! The MIT License provides you with considerable freedom. You can use aiFlows in any way you see fit, modify it according to your needs, +and integrate it into your projects without worrying about restrictive conditions.

+
+
+

5. How Can I Contribute to aiFlows?

+

Contributions are highly welcome! Whether it’s bug fixes, new features, or improvements, the community thrives on collaboration. Head over to the Contribution Guidelines to +understand how you can actively participate in making aiFlows even better.

+
+

Remember, aiFlows is here to empower your projects and initiatives without any catches. Your contributions and engagement with the community are what make aiFlows flourish. Happy coding! 🚀✨

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/contributing_info/recognition_info.html b/docs/built_with_sphinx/html/contributing_info/recognition_info.html new file mode 100644 index 0000000..79052b8 --- /dev/null +++ b/docs/built_with_sphinx/html/contributing_info/recognition_info.html @@ -0,0 +1,178 @@ + + + + + + + + + +Publicizing Your Work | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Publicizing Your Work

+
+

1. Do Contributors to aiFlows’ Codebase Appear on the Contributors Wall in the Library’s Next Release?

+

Absolutely! Contributors to aiFlows automatically earn a spot on the contributors’ wall in the README section of the library’s next release. Your efforts are recognized and celebrated as part of the growing community.

+
+
+

2. How Can I Share My Work With the Community?

+

Sharing your work is highly encouraged! Here are some channels on Discord to consider:

+
    +
  • For Flows On The FlowVerse: Utilize the 🤲│flow-sharing channel and the 🔨│community-projects forum on Discord.

  • +
  • For Contributions To aiFlows Library: Engage with the community in the 🔨│community-projects channels.

  • +
  • For Research Contributions: Share your findings on the 🔬│research channel or explore opportunities in 🔨│community-projects.

  • +
+
+
+

3. Are Contributors Cited for Their Contributions to Research?

+

Absolutely. Proper recognition is key. Contributors to projects and research are, and should always be, acknowledged and cited for their valuable contributions. This not only honors your work but also builds a culture of respect and collaboration within the community.

+

Remember, your contributions matter, and sharing your work not only benefits you but also enriches the entire aiFlows community. Happy contributing! 🚀🌐

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/genindex.html b/docs/built_with_sphinx/html/genindex.html new file mode 100644 index 0000000..af3c8e7 --- /dev/null +++ b/docs/built_with_sphinx/html/genindex.html @@ -0,0 +1,1325 @@ + + + + + + + + +Index | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+

Index

+

A

+ + + + +
+

B

+ + + + +
+

C

+ + + + +
+

D

+ + + + +
+

E

+ + + + +
+

F

+ + + + +
+

G

+ + + + +
+

H

+ + + + +
+

I

+ + + + +
+

J

+ + + + +
+

K

+ + + + +
+

L

+ + + + +
+

M

+ + + + +
+

N

+ + + + +
+

O

+ + + + +
+

P

+ + + + +
+

R

+ + + + +
+

S

+ + + + +
+

T

+ + + + +
+

U

+ + + + +
+

V

+ + + + +
+

W

+ + + + +
+
+ +
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Quick_Start/quick_start.html b/docs/built_with_sphinx/html/getting_started/Quick_Start/quick_start.html new file mode 100644 index 0000000..82a79b9 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Quick_Start/quick_start.html @@ -0,0 +1,303 @@ + + + + + + + + + +Quick Start | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Quick Start

+

Welcome to the exciting world of aiFlows! 🚀

+

This tutorial will guide you through your first inference runs with different Flows from the FlowVerse for the task of question answering (QA) as an example. In the process, you’ll get familiar with the key aspects of the library and experience how, thanks to the modular abstraction and FlowVerse, we can trivially switch between very different pre-implemented question-answering Flows!

+

The guide is organized in two sections:

+
    +
  1. Section 1: Running your first QA Flow using a Flow from the FlowVerse 🥳

  2. +
  3. Section 2: FlowVerse Playground Notebook

  4. +
+
+

Section 1: Running your First QA Flow using a Flow from the FlowVerse

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Learned how to pull Flows from the FlowVerse

  • +
  • Run my first Flow

  • +
  • Understood how to pass my API information to a Flow

  • +
+

While we support many more API providers (including custom ones), for the sake of simplicity, in this tutorial, we will use OpenAI and Azure.

+
+
+

Step 1: Pull a Flow From the FlowVerse

+

Explore a diverse array of Flows on the FlowVerse here. In this demonstration, we’ll illustrate how to use a Flow from the FlowVerse, focusing on the ChatAtomicFlow within the ChatFlowModule. This versatile Flow utilizes a language model (LLM) via an API to generate textual responses for given textual inputs. It’s worth noting the same process described here applies to any available Flow in the FlowVerse (implemented by any member of the community).

+

Without further ado, let’s dive in!

+

Concretely, you would use the sync_dependencies function to pull the flow definition and its code from the FlowVerse:

+
from aiflows import flow_verse
+dependencies = [
+{"url": "aiflows/ChatFlowModule", "revision": "main"}
+]
+
+flow_verse.sync_dependencies(dependencies)
+
+
+
+

External Library Dependencies

+

Each Flow on the FlowVerse should include a pip_requirements.txt file for external library dependencies (if it doesn’t have any, the file should be empty). You can check its dependencies on the FlowVerse. In general, if there are any, you need to make sure to install them.

+

As you can see here, the ChatFlowModule doesn’t have any external dependencies, so we’re all set.

+
+
+
+

Step 3: Run the Flow!

+

After executing sync_dependencies, the code implementation of ChatFlowModule has been pulled into the local repository. +We can now just import it:

+
from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow
+
+
+

Set your API information (copy-paste it):

+

+#OpenAI backend
+api_key = "" # copy paste your api key here
+api_information = [ApiInfo(backend_used="openai", api_key=api_key)]
+
+# Azure backend
+# api_key = "" # copy paste your api key here
+# api_base = "" # copy paste your api base here
+# api_version = "" # copy paste your api version here
+# api_information = ApiInfo(backend_used = "azure",
+#                           api_base =api_base,
+#                           api_key = api_key,
+#                           api_version =  api_version )
+
+
+

Each flow from the FlowVerse should have a demo.yaml file, which is a demo configuration of how to instantiate the flow.

+

Load the demo.yaml configuration:

+
from aiflows.utils.general_helpers import read_yaml_file
+# get demo configuration
+cfg = read_yaml_file("flow_modules/aiflows/ChatFlowModule/demo.yaml")
+
+
+

An attentive reader might have noticed that the field flow.backend.api_infos in demo.yaml is set to “???” (see a snippet here below).

+
flow:  # Overrides the ChatAtomicFlow config
+  _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+
+  name: "SimpleQA_Flow"
+  description: "A flow that answers questions."
+
+  # ~~~ Input interface specification ~~~
+  input_interface_non_initialized:
+    - "question"
+
+  # ~~~ backend model parameters ~~
+  backend:
+    _target_: aiflows.backends.llm_lite.LiteLLMBackend
+    api_infos: ???
+
+
+

The following overwrites the field with your personal API information:

+
# put the API information in the config
+cfg["flow"]["backend"]["api_infos"] = api_information
+
+
+

Instantiate your Flow:

+
# ~~~ Instantiate the Flow ~~~
+flow = ChatAtomicFlow.instantiate_from_default_config(**cfg["flow"])
+flow_with_interfaces = {
+    "flow": flow,
+    "input_interface": None,
+    "output_interface": None,
+}
+
+
+

Note that input_interface and output_interface are here to control the data that comes in and out of the flow. In this case, we don’t need specific data manipulation, so we will leave them as None.

+

Load some data and run your flow with the FlowLauncher:

+
# ~~~ Get the data ~~~
+data = {"id": 0, "question": "What is the capital of France?"}
+
+# ~~~ Run the Flow ~~~
+_, outputs  = FlowLauncher.launch(
+        flow_with_interfaces= flow_with_interfaces ,data=data
+    )
+    # ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

Congratulations! You’ve successfully run your first question-answering Flow!

+
+

You can find this example in runChatAtomicFlow.py

+

To run it, use the following commands in your terminal (make sure to copy-paste your keys first):

+
cd examples/quick_start/
+python runChatAtomicFlow.py
+
+
+

Upon execution, the result should appear as follows:

+
[{'api_output': 'The capital of France is Paris.'}]
+
+
+
+
+
+

Section 2: FlowVerse Playground Notebook

+

Want to quickly run some Flows from FlowVerse? Check out our jupyter notebook flow_verse_playground.ipynb where you can quickly switch between the following flows from the FlowVerse:

+ +
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/atomic_flow.html b/docs/built_with_sphinx/html/getting_started/Tutorial/atomic_flow.html new file mode 100644 index 0000000..348a8a6 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/atomic_flow.html @@ -0,0 +1,263 @@ + + + + + + + + + +Atomic Flow Tutorial | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Atomic Flow Tutorial

+

This guide presents the concept of an AtomicFlow and is organized into two sections:

+
    +
  1. Section 1: Defining Atomic Flows

  2. +
  3. Section 2: Writing Your First Atomic Flow

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained insight into the relationship among a Flow, an input interface, and an output interface

  • +
  • Acquired hands-on experience in creating an AtomicFlow with the example of ReverseNumberAtomic

  • +
  • Learned how to run a flow with a FlowLauncher

  • +
+
+
+

Section 1: Defining Atomic Flows

+

The AtomicFlow class is a subclass of Flow and corresponds to an Input/Output interface around a tool (note that LLMs are also tools in the Flows framework!).

+

In the paper it’s defined as such:

+
+

An Atomic Flow is effectively a minimal wrapper around +a tool and achieves two things:

+
    +
  1. It fully specifies the tool (e.g., the most basic Atomic Flow around +GPT-4 would specify the prompts and the generation parameters)

  2. +
  3. It abstracts the complexity of the internal computation by exposing only a standard message-based interface for exchanging information with other Flows.

  4. +
+
+

Examples of Atomic Flows include:

+ +
+
+

Section 2: Writing Your First Atomic Flow

+

As a starting example, let’s create an Atomic Flow that takes a number and returns its reverse. (e.g., if the input is 1234, it should return 4321)

+

The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in reverseNumberAtomic.yaml):

+
name: "ReverseNumber"
+description: "A flow that takes in a number and reverses it."
+
+input_interface:
+  _target_: aiflows.interfaces.KeyInterface
+  keys_to_select: ["number"]
+
+output_interface: # Connector between the Flow's output and the caller
+  _target_: aiflows.interfaces.KeyInterface
+  keys_to_rename:
+    output_number: "reversed_number" # Rename the output_number to reversed_number
+
+
+

Breaking it down:

+
    +
  • The name and description parameters are self-explanatory. When defining a Flow you must always define these parameters

  • +
  • input_interface and output_interface define the transformation applied to the input and output data before and after calling the flow. In this case, the input_interface ensures the key number is in the input data dictionary and passes it to the flow. The output_interface renames the key output_number to reversed_number in the output data dictionary.

  • +
+

Now let’s define the Flow. The class would be implemented as follows (you can also check out the py file reverse_number_atomic.py):

+
class ReverseNumberAtomicFlow(AtomicFlow):
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+    # Customize the logic within this function as needed for your specific flow requirements.
+    def run(self,input_data: Dict[str, Any]) -> Dict[str, Any]:
+        input_number = input_data["number"]
+        output_number = int(str(input_number)[::-1])
+        response = {"output_number": output_number}
+        return response
+
+
+

and instantiate the Flow by executing:

+
overrides_config = read_yaml_file("reverseNumberAtomic.yaml")
+
+# ~~~ Instantiate the flow ~~~
+flow = ReverseNumberAtomicFlow.instantiate_from_default_config(overrides=overrides_config)
+
+
+

Note that you can also pass a Python dictionary as the overrides parameter and not rely on YAML files.

+

With all the preparations in place, we can now proceed to invoke our flow and execute it using the FlowLauncher.

+
# ~~~ Get the data ~~~
+data = {"id": 0, "number": 1234}  # This can be a list of samples
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The complete example is accessible here and can be executed as follows:

+
cd examples/minimal\ reverse\ number/
+python reverse_number_atomic.py
+
+
+

Upon running, the answer you should expect is:

+
[{'output_number': 4321}]
+
+
+

A few other notable examples of an atomic flow include the HumanStandardInputFlowModule and the FixedReplyFlowModule Flow.

+
+

Next Tutorial: Composite Flow Tutorial

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/autogpt_tutorial.html b/docs/built_with_sphinx/html/getting_started/Tutorial/autogpt_tutorial.html new file mode 100644 index 0000000..f40847e --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/autogpt_tutorial.html @@ -0,0 +1,400 @@ + + + + + + + + + +AutoGPT Tutorial | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

AutoGPT Tutorial

+

Prequisites: setting up your API keys (see setting_up_aiFlows.md), Introducing the FlowVerse with a Simple Q&A Flow Tutorial, ReAct Tutorial, React With Human Feedback Tutorial

+

This guide introduces an implementation of the AutoGPT flow. It’s organized in two sections:

+
    +
  1. Section 1: What’s The AutoGPT flow ?

  2. +
  3. Section 2: Running the AutoGPT Flow

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications

  • +
  • Gained proficiency in executing the AutoGPTFlow

  • +
  • Enhanced comprehension of intricate flow structures

  • +
+
+
+

Section 1: What’s The AutoGPT flow ?

+

In the previous tutorial React With Human Feedback Tutorial, we introduced the ReActWithHumanFeedback Flow. Towards the end, while the flow demonstrated effective functionality, we observed a notable challenge, especially in prolonged conversations. The principal issue emerged when attempting to transmit the entire message history to the language model (LLM), eventually surpassing the permissible maximum token limit. As a temporary solution, we opted to send only the first two and the last messages as context to the LLM. However, this approach proves suboptimal if your objective is to enable the model to maintain a more comprehensive long-term memory. Consequently, in this tutorial, we will demonstrate how to create a basic implementation of the AutoGPT flow, providing a solution to tackle this issue.

+

The AutoGPT flow is a circular flow that organizes the problem-solving process into four distinct flows:

+
    +
  1. ControllerFlow: Given a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the ExecutorFlow, the next action it should execute to get closer to accomplishing its goal. In our configuration, we implement the ControllerFlow using the ChatAtomicFlow

  2. +
  3. ExecutorFlow: Following the action selection by the ControllerFlow, the process moves to the ExecutorFlow. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The ExecutorFlow executes the particular subflow associated with the action chosen by the ControllerFlow. In our setup, the ExecutorFlow includes the following individual flows:

    +
      +
    • WikiSearchAtomicFlow: This flow, given a “search term,” executes a Wikipedia search and returns content related to the search term.

    • +
    • LCToolFlow using DuckDuckGoSearchRun: This flow, given a “query,” queries the DuckDuckGo search API and retrieves content related to the query.

    • +
    +
  4. +
  5. HumanFeedbackFlow: This flow prompts the user for feedback on the latest execution of the ExecutorFlow. The collected feedback is then conveyed back to the ControllerFlow to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the AutoGPTFlow if the user expresses such a preference.

  6. +
  7. MemoryFlow: This flow is used to read and write memories of past conversations to a database. These memories can be passed to the ControllerFlow enabling it to have a long term memory without having to transmit the entire message history to the language model (LLM). It’s implemented with the VectorStoreFlow

  8. +
+

Here’s a broad overview of the AutoGPTFlow:

+
| -------> Memory Flow -------> Controller Flow ------->|
+^                                                       |      
+|                                                       |
+|                                                       v
+| <----- HumanFeedback Flow <------- Executor Flow <----|
+
+
+
+
+

Section 2 Running the AutoGPT Flow

+

In this section, we’ll guide you through running the AutoGPTFlow.

+

For the code snippets referenced from this point onward, you can find them here.

+

Now, let’s delve into the details without further delay!

+

Similar to the Introducing the FlowVerse with a Simple Q&A Flow tutorial (refer to that tutorial for more insights), we’ll start by fetching some flows from the FlowVerse. Specifically, we’ll fetch the AutoGPTFlowModule, which includes ControllerFlow, ExecutorFlow, and the WikiSearchAtomicFlow. Additionally, we’ll fetch the LCToolFlow, a flow capable of implementing the DuckDuckGo search flow.

+
from aiflows import flow_verse
+# ~~~ Load Flow dependencies from FlowVerse ~~~
+dependencies = [
+    {"url": "aiflows/AutoGPTFlowModule", "revision": "main"},
+    {"url": "aiflows/LCToolFlowModule", "revision": "main"}
+]
+
+flow_verse.sync_dependencies(dependencies)
+
+
+

If you’ve successfully completed the ReAct Tutorial, you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a pip_requirements.txt file detailing external library dependencies. To further explore this, examine the pip_requirements.txt for the LCToolFlowModule, and the pip_requirements.txt for the AutoGPTFlowModule. You’ll observe the necessity to install the following external libraries if they haven’t been installed already:

+
pip install duckduckgo-search==3.9.6
+pip install wikipedia==1.4.0 
+pip install langchain==0.0.336 
+pip install chromadb==0.3.29
+pip install faiss-cpu==1.7.4
+
+
+

Now that we’ve fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our Flow.

+

The configuration for our flow is available in AutoGPT.yaml. We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. AutoGPTFlow’s default config can be found here, the LCToolFlow default config can be found here, and the default config of the memory flow, VectorStoreFlow, can be found here.

+

Our focus will be on explaining the modified parameters in the configuration, with reference to the ReAct With Human Feedback Tutorial for unchanged parameters. +Now let’s look at the flow’s configuration:

+
flow:
+  _target_: flow_modules.aiflows.AutoGPTFlowModule.AutoGPTFlow.instantiate_from_default_config
+  max_rounds: 30
+
+
+
    +
  • _target_: We’re instantiating AutoGPTFlow with its default configuration and introducing some overrides, as specified below.

  • +
  • max_rounds: The maximum number of rounds the flow can run for.

  • +
+

Now let’s look at the flow’s subflows_config, which provides configuration details for ReAct’s subflows—ControllerFlow, the ExecutorFlow, the HumanFeedbackFlow and the MemoryFlow:

+
  ### Subflows specification
+  subflows_config:
+    #ControllerFlow Configuration
+    Controller:
+      _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config
+      commands:
+        wiki_search:
+          description: "Performs a search on Wikipedia."
+          input_args: ["search_term"]
+        ddg_search:
+          description: "Query the search engine DuckDuckGo."
+          input_args: ["query"]
+        finish:
+          description: "Signal that the objective has been satisfied, and returns the answer to the user."
+          input_args: ["answer"]
+      backend:
+        api_infos: ???
+      human_message_prompt_template:
+        template: |2-
+          Here is the response to your last action:
+          {{observation}}
+          Here is the feedback from the user:
+          {{human_feedback}}
+        input_variables:
+          - "observation"
+          - "human_feedback"
+      input_interface_initialized:
+        - "observation"
+        - "human_feedback"
+
+      previous_messages:
+        last_k: 1
+        first_k: 2
+
+
+

The ControllerFlow is identical to ReActWithHumanFeedback.

+
    #ExecutorFlow Configuration
+    Executor:
+      _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
+      subflows_config:
+        wiki_search:
+          _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
+        ddg_search:
+          _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config
+          backend:
+            _target_: langchain.tools.DuckDuckGoSearchRun
+
+
+

The ExecutorFlow is identical to ReActWithHumanFeedback and ReAct.

+
    #MemoryFlow Configuration
+    Memory:
+      backend:
+        model_name: none
+        api_infos: ???
+
+
+

The MemoryFlow is primarily instantiated from AutoGPT’s default configuration. Additionally, please refer to the MemoryFlow’s FlowCard for more details.

+

With our configuration file in place, we can now proceed to call our flow. Begin by configuring your API information. Below is an example using an OpenAI key, along with examples for other API providers (commented):

+
# ~~~ Set the API information ~~~
+# OpenAI backend
+api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
+# Azure backend
+# api_information = ApiInfo(backend_used = "azure",
+#                           api_base = os.getenv("AZURE_API_BASE"),
+#                           api_key = os.getenv("AZURE_OPENAI_KEY"),
+#                           api_version =  os.getenv("AZURE_API_VERSION") )
+
+
+
+

Next, load the YAML configuration, insert your API information, and define the flow_with_interfaces dictionary as shown below:

+
cfg = read_yaml_file(cfg_path)
+    
+# put the API information in the config
+cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information
+cfg["flow"]["subflows_config"]["Memory"]["backend"]["api_infos"] = api_information
+# ~~~ Instantiate the Flow ~~~
+flow_with_interfaces = {
+    "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
+    "input_interface": (
+        None
+        if cfg.get("input_interface", None) is None
+        else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
+    ),
+    "output_interface": (
+        None
+        if cfg.get("output_interface", None) is None
+        else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
+    ),
+}
+
+
+

Lastly, execute the flow using the FlowLauncher.

+
data = {
+    "id": 0,
+    "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?",
+}
+# At first, we retrieve information about Michael Jordan the basketball player
+# If we provide feedback, only in the first round, that we are not interested in the basketball player,
+#   but the statistician, and skip the feedback in the next rounds, we get the correct answer
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces=flow_with_interfaces,
+    data=data,
+    path_to_output_file=path_to_output_file,
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The complete example is accessible here and can be executed as follows:

+
cd examples/AutoGPT
+python run.py
+
+
+

Upon execution, you will be prompted for feedback on the Executor’s answer. The interaction will resemble the following:

+
Relevant information:
+== Goal ==
+Answer the following question: What is the profession and date of birth of Michael Jordan?
+
+== Last Command ==
+wiki_search
+
+== Args
+{'search_term': 'Michael Jordan'}
+
+== Result
+{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. 
During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'}
+
+[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter.
+
+
+

You can respond with:

+
No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia?
+
+
+

Subsequently, ReAct will provide a response similar to this:

+
Relevant information:
+== Goal ==
+Answer the following question: What is the profession and date of birth of Michael Jordan?
+
+== Last Command ==
+wiki_search
+
+== Args
+{'search_term': 'Michael Irwin Jordan'}
+
+== Result
+{'wiki_content': 'Michael Irwin Jordan  (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '}
+[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter.
+
+
+

Your subsequent response could be:

+
There you go! I think you have it!
+
+
+

Eventually, the flow should terminate and return something similar to:

+
[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}]
+
+
+

Congratulations, you’ve successfully run AutoGPTFlow!

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/composite_flow.html b/docs/built_with_sphinx/html/getting_started/Tutorial/composite_flow.html new file mode 100644 index 0000000..146c765 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/composite_flow.html @@ -0,0 +1,315 @@ + + + + + + + + + +Composite Flow Tutorial | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Composite Flow Tutorial

+

Prerequisites: Atomic Flow Tutorial

+

This guide introduces the concept of a composite flow by illustrating the creation of a sequential flow, a specific type of composite flow. The content is structured into two main sections:

+
    +
  1. Section 1: Defining Composite Flows and Sequential Flows

  2. +
  3. Section 2: Writing Your First Sequential Flow

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained insights into the concept of a Composite Flow

  • +
  • Acquired the skills to create a SequentialFlow through a toy example

  • +
  • Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure

  • +
+
+
+

Section 1: Defining Composite Flows and Sequential Flows

+

A SequentialFlow entails the sequential execution of a series of flows. It’s a subclass of CompositeFlow.

+

In the paper, a Composite Flow is described as follows:

+
+

Composite Flows accomplish more challenging, higher-level goals by leveraging and coordinating +other Flows. Crucially, thanks to their local state and standardized interface, Composite Flows +can readily invoke Atomic Flows or other Composite Flows as part of compositional, structured +interactions of arbitrary complexity. Enabling research on effective patterns of interaction is one of +the main goals of our work.

+
+

Therefore, a SequentialFlow is a specialized form of CompositeFlow that runs Flows sequentially.

+

Other types of Composite Flows include:

+
    +
  • CircularFlow: A series of flows executed in a circular fashion (e.g., ReAct)

  • +
  • BranchingFlow: A series of flows organized in a parallel fashion. The branch (Flow) executed depends on the input of the branching flow (e.g. BranchingFlow)

  • +
+
+
+

Section 2: Writing Your First Sequential Flow

+

As an introductory example, let’s leverage the atomic flow created in the previous tutorial (Atomic Flow Tutorial) to construct a SequentialFlow. This SequentialFlow will take a number, reverse it, and then reverse it back again.

+

Given the input number 1234, the process should unfold as follows:

+
Input       |          Sequential Flow             |        Output          
+------------|--------------------------------------|--------------
+            |                                      |                        
+1234 -------|---> Flow1 ---> 4321 ---> Flow2 ------|-----> 1234             
+            |                                      |                        
+            |                                      |                        
+
+
+

The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in reverseNumberSequential.yaml):

+
name: "ReverseNumberTwice"
+description: "A sequential flow that reverses a number twice."
+
+# input and output interfaces of SequentialFlow
+input_interface:
+  - "number"
+
+output_interface:
+  - "output_number"
+
+#configuration of subflows
+subflows_config:
+  first_reverse_flow:
+    _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config
+    name: "ReverseNumberFirst"
+    description: "A flow that takes in a number and reverses it."
+  second_reverse_flow:
+    _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config
+    name: "ReverseNumberSecond"
+    description: "A flow that takes in a number and reverses it."
+
+# Define order of execution of subflows and input & output interfaces for proper execution
+topology:
+  #first flow to execute
+  - goal: reverse the input number
+    input_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_select: ["number"]
+    flow: first_reverse_flow
+    output_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_rename:
+        output_number: first_reverse_output
+      keys_to_select: ["first_reverse_output"]
+    reset: false
+  #second flow to execute
+  - goal: reverse the output of the first reverse
+    input_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_rename:
+        first_reverse_output: number
+      keys_to_select: ["number"]
+    flow: second_reverse_flow
+    output_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_select: ["output_number"]
+    reset: false
+
+
+

Breaking it down:

+
    +
  • The name and description parameters are self-explanatory. When defining a Flow you must always define these parameters

  • +
  • input_interface specifies the expected keys in the input data dictionary passed to the SequentialFlow

  • +
  • output_interface outlines the expected keys in the output data dictionary produced by the SequentialFlow

  • +
  • In the subflows_config, the specification of the flows constituting the SequentialFlow is detailed. Each subflow is articulated as a key-item pair within a dictionary. The key denotes the name assigned to the subflow, while the corresponding item is a dictionary encapsulating the configuration of the subflow. In this instance, subflows are outlined with their default configuration, incorporating overrides for the name and description of each flow.

  • +
  • topology defines the order in which flows are executed within our SequentialFlow. +It also specifies the input and output interfaces for each flow. The fields in topology include:

    +
      +
    • goal: A description of the objective of the flow at the given execution step.

    • +
    • flow: The name of the flow to be invoked, matching the name defined in subflows_config.

    • +
    • input_interface: Specifies the transformation to the input data +dictionary before passing it to the current subflow.

    • +
    • output_interface: Specifies the transformation to the output data dictionary +before passing it to the next subflow.

    • +
    • reset: Determines whether to reset the state and history of the flow after calling it (i.e., deletes all message history and key-value pairs (cache) saved in the flow state).

    • +
    +
  • +
+

Note the importance of the transformations defined in the input_interface and output_interface +within the topology. These transformations play a crucial role in establishing a connection +between the two flows. Specifically, the input_interface of the second_reverse_flow includes a transformation +that renames the dictionary key first_reverse_output, which is passed by the first_reverse_flow, to number. +This ensures proper key naming and enables the seamless execution of the subsequent flow.

+

Now let’s instantiate the SequentialFlow (you can also check out the py file +reverse_number_sequential.py):

+
cfg_path = os.path.join(root_dir, "reverseNumberSequential.yaml")
+cfg = read_yaml_file(cfg_path)
+
+# ~~~ Instantiate the flow ~~~
+flow = SequentialFlow.instantiate_from_default_config(**cfg)
+
+
+

There is no need to define any new class +since the SequentialFlow is a base_flow (meaning it’s already defined in the aiFlows library) and we’ve already +defined the ReverseNumberAtomicFlow in the previous tutorial (Atomic Flow Tutorial)

+

With all the preparations in place, we can now proceed to invoke our flow and execute it using the FlowLauncher.

+
# ~~~ Get the data ~~~
+data = {"id": 0, "number": 1234}  # This can be a list of samples
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The complete example is accessible here and can be executed as follows:

+
cd examples/minimal\ reverse\ number/
+python reverse_number_sequential.py
+
+
+

Upon running, the answer you should expect is:

+
[{'output_number': 1234}]
+
+
+
+

Next Tutorial: Introducing the FlowVerse with a Simple Q&A Flow

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.html b/docs/built_with_sphinx/html/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.html new file mode 100644 index 0000000..51bfe4e --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.html @@ -0,0 +1,387 @@ + + + + + + + + + +Introducing the FlowVerse with a Simple Q&A Flow | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Introducing the FlowVerse with a Simple Q&A Flow

+

Prerequisites: setting up your API keys (see setting_up_aiFlows.md), Atomic Flow Tutorial

+

This guide introduces the FlowVerse via an example: minimalQA. The guide is organized in two sections:

+
    +
  1. Section 1: What’s the FlowVerse?

  2. +
  3. Section 2: Crafting a Simple Q&A Flow with the ChatFlowModule

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained an understanding of the FlowVerse and its significance

  • +
  • Acquired the skills to retrieve flows from the FlowVerse

  • +
  • Successfully developed my initial flow by incorporating a FlowVerse flow

  • +
  • Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API

  • +
  • Familiarized myself with the fundamental parameters of the ChatAtomicFlow

  • +
+
+
+

Section 1: What’s the FlowVerse ?

+

The FlowVerse is the hub of flows created and shared by our amazing community for everyone to use! These flows are usually shared on Hugging Face with the intention of being reused by others. Explore our Flows on the FlowVerse here!

+
+
+

Section 2: Crafting a Simple Q&A Flow with the ChatFlowModule

+

In this section, we’ll guide you through the creation of a simple Q&A flow — a single user-assistant interaction with an LLM. We’ll achieve this by leveraging the ChatAtomicFlow from the ChatFlowModule in the FlowVerse. The ChatAtomicFlow seamlessly interfaces with an LLM through an API, generating textual responses for textual input. Powered by the LiteLLM library in the backend, ChatAtomicFlow supports various API providers; explore the full list here.

+

For an in-depth understanding of ChatAtomicFlow, refer to its FlowCard (README). +Note that all the code referenced from this point onwards can be found here

+

Let’s dive in without further delay!

+

First thing to do is to fetch the ChatFlowModule from the FlowVerse (see run_qa_flow.py to see all the code):

+
from aiflows import flow_verse
+# ~~~ Load Flow dependencies from FlowVerse ~~~
+dependencies = [
+    {"url": "aiflows/ChatFlowModule", "revision": "297c90d08087d9ff3139521f11d1a48d7dc63ed4"},
+]
+flow_verse.sync_dependencies(dependencies)
+
+
+

Let’s break this down:

+
    +
  • dependencies is a list of dictionaries (in this case, there’s only one) indicating which FlowModules we want to pull from the FlowVerse. The dictionary contains two key-value pairs:

    +
      +
    • url: Specifies the URL where the flow can be found on Hugging Face. Here, the URL is aiflows/ChatFlowModule, where aiflows is the name of our organization on Hugging Face (or the username of a user hosting their flow on Hugging Face), and ChatFlowModule is the name of the FlowModule containing the ChatAtomicFlow on the FlowVerse. Note that the url is literally the address of the ChatFlowModule on Hugging Face (excluding the https://huggingface.co/). So if you type https://huggingface.co/aiflows/ChatFlowModule in your browser, you will find the Flow.

    • +
    • revision: Represents the revision id (i.e., the full commit hash) of the commit we want to fetch. Note that if you set revision to main, it will fetch the latest commit on the main branch.

    • +
    +
  • +
+

Now that we’ve fetched the ChatAtomicFlowModule from the FlowVerse, we can start creating our Flow.

+

The configuration for our flow is available in simpleQA.yaml. We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The default configuration can be found here

+

Let’s start with the input and output interface:

+
input_interface: # Connector between the "input data" and the Flow
+  _target_: aiflows.interfaces.KeyInterface
+  additional_transformations:
+    - _target_: aiflows.data_transformations.KeyMatchInput # Pass the input parameters specified by the flow
+
+output_interface: # Connector between the Flow's output and the caller
+  _target_: aiflows.interfaces.KeyInterface
+  keys_to_rename:
+    api_output: answer # Rename the api_output to answer
+
+
+
    +
  • input_interface specifies the expected keys in the input data dictionary passed to our flow.

  • +
  • output_interface outlines the expected keys in the output data dictionary produced by our flow.

  • +
+

Now let’s look at the flow’s configuration:

+
flow: # Overrides the ChatAtomicFlow config
+  _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+
+  name: "SimpleQA_Flow"
+  description: "A flow that answers questions."
+
+
+
    +
  • The _target_ parameter specifies the instantiation method for our flow. In this instance, we’re using it to instantiate the ChatAtomicFlow from its default configuration file

  • +
  • name and description: self-explanatory parameters

  • +
+
  # ~~~ Input interface specification ~~~
+  input_interface_non_initialized:
+    - "question"
+
+
+
    +
  • The input_interface_non_initialized parameter in our configuration specifies the keys expected in the input data dictionary when the ChatAtomicFlow is called for the first time (i.e., when the system prompt is constructed). Essentially, it serves a role similar to the regular input_interface. The distinction becomes apparent when you require different inputs for the initial query compared to subsequent queries. For instance, in ReAct, the first time you query the LLM, the input is provided by a human, such as a question. In subsequent queries, the input comes from the execution of a tool (e.g. a query to wikipedia). In ReAct’s case, these two scenarios are distinguished by ChatAtomicFlow’s input_interface_non_initialized and input_interface_initialized parameters. For this tutorial, as we’re creating a simple Q&A flow performing a single user-assistant interaction with an LLM, we never use input_interface_initialized (which is why it’s not defined in the configuration).

  • +
+
  # ~~~ backend model parameters ~~
+  backend:
+    _target_: aiflows.backends.llm_lite.LiteLLMBackend
+    api_infos: ???
+    model_name:
+      openai: "gpt-3.5-turbo"
+      azure: "azure/gpt-4"
+
+    # ~~~ generation_parameters ~~
+    n: 1
+    max_tokens: 3000
+    temperature: 0.3
+
+    top_p: 0.2
+    frequency_penalty: 0
+    presence_penalty: 0
+
+
+
    +
  • backend is a dictionary containing parameters specific to the LLM. These parameters include:

    +
      +
    • api_infos Your API information (which will be passed later for privacy reasons).

    • +
    • model_name A dictionary with key-item pairs, where keys correspond to the backend_used attribute of the ApiInfo class for the chosen backend, and values represent the desired model for that backend. Model selection depends on the provided api_infos. Additional models can be added for different backends, following LiteLLM’s naming conventions (refer to LiteLLM’s supported providers and model names here). For instance, with an Anthropic API key and a desire to use “claude-2,” one would check Anthropic’s model details here. As “claude-2” is named the same in LiteLLM, the model_name dictionary would be updated as follows:

      +
      backend:
      +_target_: aiflows.backends.llm_lite.LiteLLMBackend
      +api_infos: ???
      +model_name:
      +  openai: "gpt-3.5-turbo"
      +  azure: "azure/gpt-4"
      +  anthropic: "claude-2"
      +
      +
      +
    • +
    • n,max_tokens,top_p, frequency_penalty, presence_penalty are generation parameters for LiteLLM’s completion function (refer to all possible generation parameters here).

    • +
    +
  • +
+
  # ~~~ Prompt specification ~~~
+  system_message_prompt_template:
+    _target_: aiflows.prompt_template.JinjaPrompt
+    template: |2-
+      You are a helpful chatbot that truthfully answers questions.
+    input_variables: []
+    partial_variables: {}
+
+  init_human_message_prompt_template:
+    _target_: aiflows.prompt_template.JinjaPrompt
+    template: |2-
+      Answer the following question: {{question}}
+    input_variables: ["question"]
+    partial_variables: {}
+
+
+
    +
  • system_message_prompt_template: This is the system prompt template passed to the LLM.

  • +
  • init_human_message_prompt_template: This is the user prompt template passed to the LLM the first time the flow is called. It includes the following parameters:

    +
      +
    • template The prompt template in Jinja format.

    • +
    • input_variables The input variables of the prompt. For instance, in our case, the prompt template +is “Answer the following question: {{question}},” and our input_variables is “question.” Before querying the LLM, the prompt template is rendered by placing the input variable “question” in the placeholder “{{question}}” of the prompt template. It’s worth noting that input_interface_non_initialized == input_variables. This alignment is intentional, as they are passed as input_variables to the init_human_message_prompt_template to render the template

    • +
    +
  • +
+

Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. Below is an example using an OpenAI key, along with examples for other API providers (in comment):

+
 # ~~~ Set the API information ~~~
+# OpenAI backend
+
+api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
+
+# # Azure backend
+# api_information = [ApiInfo(backend_used = "azure",
+#                           api_base = os.getenv("AZURE_API_BASE"),
+#                           api_key = os.getenv("AZURE_OPENAI_KEY"),
+#                           api_version =  os.getenv("AZURE_API_VERSION") )]
+
+# # Anthropic backend
+#api_information = [ApiInfo(backend_used= "anthropic",api_key = os.getenv("ANTHROPIC_API_KEY"))]
+
+
+
+

Next, load the YAML configuration, insert your API information, and define the flow_with_interfaces dictionary:

+

+cfg_path = os.path.join(root_dir, "simpleQA.yaml")
+cfg = read_yaml_file(cfg_path)
+# put api information in config (done like this for privacy reasons)
+cfg["flow"]["backend"]["api_infos"] = api_information
+
+# ~~~ Instantiate the Flow ~~~
+flow_with_interfaces = {
+    "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
+    "input_interface": (
+        None
+        if cfg.get("input_interface", None) is None
+        else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
+    ),
+    "output_interface": (
+        None
+        if cfg.get("output_interface", None) is None
+        else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
+    ),
+}
+
+
+

Finally, run the flow with FlowLauncher.

+
# ~~~ Get the data ~~~
+data = {"id": 0, "question": "Who was the NBA champion in 2023?"}  # This can be a list of samples
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The full example is available here and can be executed as follows:

+
cd examples/minimal\ QA/
+python run_qa_flow.py 
+
+
+

Upon running, the answer is similar to the following:

+
[{'answer': "I'm sorry, but as an AI language model, I don't have access to real-time information or the ability to predict future events. As of now, I cannot provide you with the answer to who the NBA champion was in 2023. I recommend checking reliable sports news sources or conducting an internet search for the most up-to-date information."}]
+
+
+

To learn how to obtain information on the 2023 NBA Champion using Flows, refer to the next tutorial ReAct, a Flow that gives ChatAtomicFlow access to tools like search engines!

+

Additionally, the minimal QA folder contains other examples using ChatAtomicFlow such as:

+
    +
  • Running a Flow with Demonstrations (encouraging the LLM to finish its answers with “my sire”). To run:

    +
    cd examples/minimal\ QA/
    +python run_qa_flow_w_demonstrations.py
    +
    +
    +
  • +
  • Running the Simple Q&A flow in a multithreaded fashion in order to answer multiple questions with multiple API keys or providers. To run:

    +
    cd examples/minimal\ QA/
    +python run_qa_flow_multithreaded.py
    +
    +
    +
  • +
+
+

Next Tutorial: ReAct Tutorial

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/reAct.html b/docs/built_with_sphinx/html/getting_started/Tutorial/reAct.html new file mode 100644 index 0000000..870255e --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/reAct.html @@ -0,0 +1,354 @@ + + + + + + + + + +ReAct Tutorial | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

ReAct Tutorial

+

Prerequisites: setting up your API keys (see setting_up_aiFlows.md), Introducing the FlowVerse with a Simple Q&A Flow Tutorial, Atomic Flow Tutorial, Composite Flow Tutorial

+

This guide introduces an implementation of the ReAct flow. The guide is organized in two sections:

+
    +
  1. Section 1: What’s The ReAct Flow ?

  2. +
  3. Section 2: Running the ReAct Flow

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained an understanding of the ReAct flow and its significance

  • +
  • Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies

  • +
  • Successfully developed my first personalized ReAct flow

  • +
  • Familiarized myself with the essential parameters of the ControllerExecutorFlow

  • +
+
+
+

Section 1: What’s The ReAct Flow ?

+

The ReAct flow, as introduced in ReAct: Synergizing Reasoning and Acting in Language Models, represents a Circular flow that organizes the problem-solving process into two distinct flows:

+
    +
  1. ControllerFlow: With a specified goal and past observations from prior executions, the ControllerFlow makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the ExecutorFlow and contribute to progressing towards the defined goal. In our configuration, we implement the ControllerFlow using the ChatAtomicFlow.

  2. +
  3. ExecutorFlow: Following the action selection by the ControllerFlow, the process moves to the ExecutorFlow. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The ExecutorFlow executes the particular subflow associated with the action chosen by the ControllerFlow. In our setup, the ExecutorFlow includes the following individual flows:

    +
      +
    • WikiSearchAtomicFlow: This flow, given a “search term,” executes a Wikipedia search and returns content related to the search term.

    • +
    • LCToolFlow using DuckDuckGoSearchRun: This flow, given a “query,” queries the DuckDuckGo search API and retrieves content related to the query.

    • +
    +
  4. +
+

These steps are repeated until an answer is obtained.

+
+
+

Section 2: Running The ReAct Flow

+

In this section, we’ll guide you through running the ReAct Flow.

+

For the code snippets referenced from this point onward, you can find them here

+

Now, let’s delve into the details without further delay!

+

Similar to the Introducing the FlowVerse with a Simple Q&A Flow tutorial (refer to that tutorial for more insights), we’ll start by fetching some flows from the FlowVerse. Specifically, we’ll fetch the ControllerExecutorFlowModule, which includes the ControllerExecutorFlow (the composite flow of ControllerFlow and ExecutorFlow) and the WikiSearchAtomicFlow. Additionally, we’ll fetch the LCToolFlow, a flow capable of implementing the DuckDuckGo search flow.

+
from aiflows import flow_verse
+# ~~~ Load Flow dependecies from FlowVerse ~~~
+dependencies = [
+    {"url": "aiflows/LCToolFlowModule", "revision": "main"},
+    {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"},
+]
+
+flow_verse.sync_dependencies(dependencies)
+
+
+

Each flow on the FlowVerse includes a pip_requirements.txt file for external library dependencies. Check out the pip_requirements.txt for the LCToolFlowModule) and pip_requirements.txt for the ControllerExecutorFlowModule. You’ll notice the need to install the following external libraries:

+
pip install wikipedia==1.4.0
+pip install langchain==0.0.336
+pip install duckduckgo-search==3.9.6
+
+
+

Now that we’ve fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our flow.

+

The configuration for our flow is available in ReAct.yaml. We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The ControllerExecutorFlow’s default config can be found here and the LCToolFlow default config can be found here. +Now let’s look at the flow’s configuration:

+
flow:
+  _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerExecutorFlow.instantiate_from_default_config
+  max_rounds: 30
+
+
+
    +
  • The _target_ parameter specifies the instantiation method for our flow. In this instance, we’re using it to instantiate the ControllerExecutorFlow from its default configuration file.

  • +
  • max_rounds: The maximum number of rounds the flow can run for.

  • +
+

Now let’s look at the flow’s subflows_config, which provides configuration details for ReAct’s subflows—ControllerFlow and the ExecutorFlow:

+
  ### Subflows specification
+  subflows_config:
+    Controller:
+      _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config
+      commands:
+        wiki_search:
+          description: "Performs a search on Wikipedia."
+          input_args: ["search_term"]
+        ddg_search:
+          description: "Query the search engine DuckDuckGo."
+          input_args: ["query"]
+        finish:
+          description: "Signal that the objective has been satisfied, and returns the answer to the user."
+          input_args: ["answer"]
+      backend:
+        _target_: aiflows.backends.llm_lite.LiteLLMBackend
+        api_infos: ???
+        model_name:
+          openai: "gpt-3.5-turbo"
+          azure: "azure/gpt-4"
+
+
+
    +
  • Controller: The configuration of the controller flow:

    +
      +
    • commands: A dictionary containing the set of actions the ControllerFlow can call. Each key of the dictionary is the name of the action it can execute, and its items are another dictionary containing the following parameters:

      +
        +
      • description: A description of what the action does (it’s important to be clear since these descriptions are passed to the system prompt to explain to the LLM what each action can do)

      • +
      • input_args: The list of arguments required by a given action

      • +
      +
    • +
    • backend: The backend used by the ControllerFlow (see the previous tutorial Introducing the FlowVerse with a Simple Q&A Flow for a more detailed description of the backend)

    • +
    +
  • +
+
    Executor:
+      _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
+      subflows_config:
+        wiki_search:
+          _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
+        ddg_search:
+          _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config
+          backend:
+            _target_: langchain.tools.DuckDuckGoSearchRun
+
+
+
    +
  • Executor: The configuration of the ExecutorFlow:

    +
      +
    • subflows_config: The configuration of the subflows of the ExecutorFlow. Each subflow corresponds to an action defined in the ControllerFlow through the commands parameter. It is noteworthy that the names of the command keys align with the names of the subflows in the Executor’s subflows_config

    • +
    +
  • +
+

Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. Below is an example using an OpenAI key, along with examples for other API providers (in comment):

+
 # ~~~ Set the API information ~~~
+# OpenAI backend
+api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
+# Azure backend
+# api_information = [ApiInfo(backend_used = "azure",
+#                           api_base = os.getenv("AZURE_API_BASE"),
+#                           api_key = os.getenv("AZURE_OPENAI_KEY"),
+#                           api_version =  os.getenv("AZURE_API_VERSION") )]
+
+
+

Next, load the YAML configuration, insert your API information, +and define the flow_with_interfaces dictionary:

+
path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+
+root_dir = "."
+cfg_path = os.path.join(root_dir, "ReAct.yaml")
+cfg = read_yaml_file(cfg_path)
+# put the API information in the config
+cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information
+
+# ~~~ Instantiate the Flow ~~~
+flow_with_interfaces = {
+    "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
+    "input_interface": (
+        None
+        if cfg.get("input_interface", None) is None
+        else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
+    ),
+    "output_interface": (
+        None
+        if cfg.get("output_interface", None) is None
+        else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
+    ),
+}
+
+
+

Finally, run the flow with FlowLauncher.

+
 # ~~~ Get the data ~~~
+# This can be a list of samples
+# data = {"id": 0, "goal": "Answer the following question: What is the population of Canada?"}  # Uses wikipedia
+data = {"id": 0, "goal": "Answer the following question: Who was the NBA champion in 2023?"}
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The full example is available here and can be executed as follows:

+
cd examples/ReAct
+python run.py
+
+
+

Upon execution, the result appears as follows:

+
[{'answer': 'The NBA champion in 2023 was the Denver Nuggets.', 'status': 'finished'}]
+
+
+

Finally we have the correct answer!

+

However, let’s consider a scenario where you want to instruct ReAct:

+
+

Answer the following question: What is the profession and date of birth of Michael Jordan?

+
+

Where Michael Jordan is the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkeley. If you run this with ReAct, the obtained answer might look like this:

+
[{'answer': 'Michael Jordan is a former professional basketball player and an American businessman. He was born on February 17, 1963.', 'status': 'finished'}]
+
+
+

Which is not what we wanted! This output does not align with our intended question.

+

To discover how to retrieve information on Michael Jordan, the Berkeley Professor, using aiFlows, refer to the next tutorial ReActWithHumanFeedback, a flow that incorporates human feedback into the ReAct flow!

+
+

Next Tutorial: ReAct With Human Feedback Tutorial

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/reActwHumanFeedback.html b/docs/built_with_sphinx/html/getting_started/Tutorial/reActwHumanFeedback.html new file mode 100644 index 0000000..927791d --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/reActwHumanFeedback.html @@ -0,0 +1,507 @@ + + + + + + + + + +ReAct With Human Feedback Tutorial | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

ReAct With Human Feedback Tutorial

+

Prerequisites: setting up your API keys (see setting_up_aiFlows.md), Introducing the FlowVerse with a Simple Q&A Flow Tutorial, ReAct Tutorial

+

This guide introduces an implementation of the ReAct With Human Feedback flow. It’s organized in two sections:

+
    +
  1. Section 1: What’s The ReAct With Human Feedback Flow ?

  2. +
  3. Section 2: Running the ReAct With Human Feedback Flow

  4. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences

  • +
  • Learned how to integrate a human feedback flow into ReAct

  • +
  • Incorporated customized functions into the input and output interfaces.

  • +
  • Grasped the limitations of ReAct, particularly its lack of long-term memory

  • +
  • Deepened my understanding of the key parameters in the ControllerExecutorFlow configuration

  • +
+
+
+

Section 1: What’s The ReAct With Human Feedback Flow ?

+

In the previous tutorial (ReAct Tutorial), we introduced the ReAct flow. We noticed towards the end that, even though it works well, it can fail in some situations. For example, consider you ask the following:

+
+

Answer the following question: What is the profession and date of birth of Michael Jordan?

+
+

In scenarios where the mentioned “Michael Jordan” refers to the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkeley, ReAct may misinterpret it as the basketball player Michael Jordan and provide information about the latter. To address this, we can introduce an additional flow in our circular flow, allowing users to provide feedback on intermediate answers. This tutorial will guide you through the creation of the ReActWithHumanFeedback flow to handle such situations.

+

The ReActWithHumanFeedback flow is a circular flow that organizes the problem-solving process into three distinct flows:

+
    +
  1. ControllerFlow: With a specified goal and past observations from prior executions, the ControllerFlow makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the ExecutorFlow and contribute to progressing towards the defined goal. In our configuration, we implement the ControllerFlow using the ChatAtomicFlow.

  2. +
  3. ExecutorFlow: Following the action selection by the ControllerFlow, the process moves to the ExecutorFlow. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The ExecutorFlow executes the particular subflow associated with the action chosen by the ControllerFlow. In our setup, the ExecutorFlow includes the following individual flows:

    +
      +
    • WikiSearchAtomicFlow: This flow, given a “search term,” executes a Wikipedia search and returns content related to the search term.

    • +
    • LCToolFlow using DuckDuckGoSearchRun: This flow, given a “query,” queries the DuckDuckGo search API and retrieves content related to the query.

    • +
    +
  4. +
  5. HumanFeedbackFlow: This flow prompts the user for feedback on the latest execution of the ExecutorFlow. The collected feedback is then conveyed back to the ControllerFlow to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the ReActWithHumanFeedbackFlow if the user expresses such a preference.

  6. +
+
+
+

Section 2: Running the ReAct With Human Feedback Flow

+

In this section, we’ll guide you through running the ReActWithHumanFeedbackFlow.

+

For the code snippets referenced from this point onward, you can find them here.

+

Now, let’s delve into the details without further delay!

+

Similar to the Introducing the FlowVerse with a Simple Q&A Flow tutorial (refer to that tutorial for more insights), we’ll start by fetching some flows from the FlowVerse. Specifically, we’ll fetch the ControllerExecutorFlowModule, which includes the ControllerExecutorFlow (the composite flow of ControllerFlow and ExecutorFlow) and the WikiSearchAtomicFlow. Additionally, we’ll fetch the LCToolFlow, a flow capable of implementing the DuckDuckGo search flow, and the HumanStandardInputFlowModule, a flow capable of gathering human feedback.

+
from aiflows import flow_verse
+# ~~~ Load Flow dependecies from FlowVerse ~~~
+dependencies = [
+    {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"},
+    {"url": "aiflows/HumanStandardInputFlowModule", "revision": "main"},
+    {"url": "aiflows/LCToolFlowModule", "revision": "main"},
+]
+
+flow_verse.sync_dependencies(dependencies)
+
+
+

If you’ve successfully completed the preceding tutorial, ReAct Tutorial, you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a pip_requirements.txt file detailing external library dependencies. To further explore this, examine the pip_requirements.txt for the LCToolFlowModule, the pip_requirements.txt for the ControllerExecutorFlowModule, and the pip_requirements.txt for the HumanStandardInputFlowModule. You’ll observe the necessity to install the following external libraries if they haven’t been installed already:

+
pip install wikipedia==1.4.0
+pip install langchain==0.0.336
+pip install duckduckgo-search==3.9.6
+
+
+

Next, in order to empower the HumanStandardInputFlow to terminate the ReActWithHumanFeedback flow, it is essential to implement a function in the ControllerExecutorFlow class for this specific purpose. Consequently, a new class, ReActWithHumanFeedback, is introduced as follows (you can find it in ReActWithHumandFeedback.py):

+
from typing import Dict, Any
+
+from aiflows.base_flows import CircularFlow
+from flow_modules.aiflows.ControllerExecutorFlowModule import ControllerExecutorFlow
+
+class ReActWithHumanFeedback(ControllerExecutorFlow):
+    @CircularFlow.output_msg_payload_processor
+    def detect_finish_in_human_input(self, output_payload: Dict[str, Any], src_flow) -> Dict[str, Any]:
+        human_feedback = output_payload["human_input"]
+        if human_feedback.strip().lower() == "q":
+            return {
+                "EARLY_EXIT": True,
+                "answer": "The user has chosen to exit before a final answer was generated.",
+                "status": "unfinished",
+            }
+
+        return {"human_feedback": human_feedback}
+
+
+

Note that, we’ve simply added one function to the class which initiates the procedure to terminate the flow should the user enter “q” when prompted for feedback.

+

The configuration for our flow is available in ReActWithHumanFeedback.yaml. We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The ControllerExecutorFlow’s default config can be found here and the LCToolFlow default config can be found here.

+

Our focus will be on explaining the modified parameters in the configuration, with reference to the previous tutorial for unchanged parameters. +Now let’s look at the flow’s configuration:

+
max_rounds: 30
+
+
+
    +
  • max_rounds: The maximum number of rounds the flow can run for.

  • +
+

Now let’s look at the flow’s subflows_config, which provides configuration details for ReAct’s subflows—ControllerFlow, the ExecutorFlow and the HumanFeedbackFlow:

+
### Subflows specification
+subflows_config:
+  #ControllerFlow
+  Controller:
+    _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config
+    backend:
+      api_infos: ???
+    commands:
+      wiki_search:
+        description: "Performs a search on Wikipedia."
+        input_args: ["search_term"]
+      ddg_search:
+        description: "Query the search engine DuckDuckGo."
+        input_args: ["query"]
+      finish:
+        description: "Signal that the objective has been satisfied, and returns the answer to the user."
+        input_args: ["answer"]
+
+    human_message_prompt_template:
+      template: |2-
+        Here is the response to your last action:
+        {{observation}}
+        Here is the feedback from the user:
+        {{human_feedback}}
+      input_variables:
+        - "observation"
+        - "human_feedback"
+    input_interface_initialized:
+      - "observation"
+      - "human_feedback"
+
+    previous_messages:
+      first_k: 2 # keep the system prompt and the original goal
+      last_k: 1 # keep only the last message
+
+
+

Note that the ControllerFlow configuration remains nearly identical to that in the previous tutorial, ReAct Tutorial. The only differences are:

+
    +
  • The inclusion of an extra argument, “human_feedback,” in both the input_interface_initialized parameter and the input_variables parameter of the human_message_prompt_template. This is to incorporate the human’s feedback in the message fed to the ControllerFlow

  • +
  • Implementation of a mechanism to limit the number of previous_messages from the flow’s chat history that is input to the Language Model (LLM). This limitation is crucial to prevent the Language Model (LLM) from exceeding the maximum token limit. Two parameters are overridden for this purpose:

    +
      +
    • first_k: Adds the first_k earliest messages of the flow’s chat history to the input of the LLM.

    • +
    • last_k: Adds the last_k latest messages of the flow’s chat history to the input of the LLM.

    • +
    +
  • +
+
  #ExecutorFlow   
+  Executor:
+    _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
+    subflows_config:
+      wiki_search:
+        _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
+      ddg_search:
+        _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config
+        backend:
+          _target_: langchain.tools.DuckDuckGoSearchRun
+
+
+

The ExecutorFlow is identical to ReAct.

+
  HumanFeedback:
+    _target_: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config
+    request_multi_line_input_flag: False
+    query_message_prompt_template:
+      template: |2-
+        Please provide feedback on the last step.
+
+        Relevant information:
+        == Goal ==
+        {{goal}}
+
+        == Last Command ==
+        {{command}}
+
+        == Args
+        {{command_args}}
+
+        == Result
+        {{observation}}
+      input_variables:
+        - "goal"
+        - "command"
+        - "command_args"
+        - "observation"
+    input_interface:
+      - "goal"
+      - "command"
+      - "command_args"
+      - "observation"
+
+
+

HumanFeedback:

+
    +
  • request_multi_line_input_flag: This boolean parameter determines whether the user/human is prompted to enter a multi-line input (True) or a single-line input (False).

  • +
  • query_message_prompt_template: This parameter involves a prompt template used to generate the message presented to the human. It includes:

    +
      +
    • template: The prompt template in Jinja format.

    • +
    • input_variables The input variables of the prompt. Note that these input variables have the same names as the placeholders “{{}}” in the template. Before querying the human, the template is rendered by placing the input_variables in the placeholders of the template.

    • +
    +
  • +
  • input_interface: Describes the expected input interface for the flow. It’s noteworthy that the input_interface is identical to the input_variables of the query_message_prompt_template. This alignment is intentional, as they are passed as input_variables to the query_message_prompt_template to render the message presented to the user.

  • +
+
topology: # The first two are the same as in the ControllerExecutorFlow
+  - goal: "Select the next action and prepare the input for the executor."
+    input_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      additional_transformations:
+        - _target_: aiflows.data_transformations.KeyMatchInput
+    flow: Controller
+    output_interface:
+      _target_: ControllerExecutorFlow.detect_finish_or_continue
+    reset: false
+
+  - goal: "Execute the action specified by the Controller."
+    input_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_rename:
+        command: branch
+        command_args: branch_input_data
+      keys_to_select: ["branch", "branch_input_data"]
+    flow: Executor
+    output_interface:
+      _target_: aiflows.interfaces.KeyInterface
+      keys_to_rename:
+        branch_output_data: observation
+      keys_to_select: ["observation"]
+    reset: false
+
+  - goal: "Ask the user for feedback."
+    input_interface:
+      _target_: aiflows.interfaces.KeyInterface
+    flow: HumanFeedback
+    output_interface:
+      _target_: ReActWithHumanFeedback.detect_finish_in_human_input
+    reset: false
+
+
+

The default topology of the ControllerExecutorFlow is overridden here:

+
    +
  • For more details on topology, refer to the tutorial Composite Flow.

  • +
  • The topology of the ControllerExecutorFlow’s default config is available here.

  • +
  • Upon comparison with the default config’s topology, one would observe that the sole alteration is the incorporation of the HumanFeedbackFlow to the circular flow.

  • +
  • Note the significance of including the detect_finish_in_human_input function from the ReActWithHumanFeedback class in the output interface. This function, as defined earlier, plays a crucial role in initiating the process of terminating the flow if the human/user provides “q” as feedback.

  • +
+

Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. Below is an example using an OpenAI key, along with examples for other API providers (in comment):

+
# ~~~ Set the API information ~~~
+# OpenAI backend
+api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
+# Azure backend
+# api_information = ApiInfo(backend_used = "azure",
+#                           api_base = os.getenv("AZURE_API_BASE"),
+#                           api_key = os.getenv("AZURE_OPENAI_KEY"),
+#                           api_version =  os.getenv("AZURE_API_VERSION") )
+
+
+

Next, load the YAML configuration, insert your API information, +and define the flow_with_interfaces dictionary:

+
path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+root_dir = "."
+cfg_path = os.path.join(root_dir, "ReActWithHumanFeedback.yaml")
+cfg = read_yaml_file(cfg_path)
+# put the API information in the config
+cfg["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information
+flow = ReActWithHumanFeedback.instantiate_from_default_config(**cfg)
+
+# ~~~ Instantiate the Flow ~~~
+flow_with_interfaces = {
+    "flow": flow,
+    "input_interface": (
+        None
+        if cfg.get("input_interface", None) is None
+        else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
+    ),
+    "output_interface": (
+        None
+        if cfg.get("output_interface", None) is None
+        else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
+    ),
+}
+
+
+

Finally, run the flow with FlowLauncher.

+
 data = {
+        "id": 0,
+        "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?",
+}
+# At first, we retrieve information about Michael Jordan the basketball player
+# If we provide feedback, only in the first round, that we are not interested in the basketball player,
+#   but the statistician, and skip the feedback in the next rounds, we get the correct answer
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
+
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces=flow_with_interfaces,
+    data=data,
+    path_to_output_file=path_to_output_file,
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+
+
+

The complete example is accessible here and can be executed as follows:

+
cd examples/ReActWithHumanFeedback
+python run.py
+
+
+

Upon execution, you will be prompted for feedback on the Executor’s answer. The interaction will resemble the following:

+
Relevant information:
+== Goal ==
+Answer the following question: What is the profession and date of birth of Michael Jordan?
+
+== Last Command ==
+wiki_search
+
+== Args
+{'search_term': 'Michael Jordan'}
+
+== Result
+{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. 
During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'}
+
+[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter.
+
+
+

You can respond with:

+
No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia?
+
+
+

Subsequently, ReAct will provide a response similar to this:

+
Relevant information:
+== Goal ==
+Answer the following question: What is the profession and date of birth of Michael Jordan?
+
+== Last Command ==
+wiki_search
+
+== Args
+{'search_term': 'Michael Irwin Jordan'}
+
+== Result
+{'wiki_content': 'Michael Irwin Jordan  (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '}
+[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter.
+
+
+

Your subsequent response could be:

+
There you go! I think you have it!
+
+
+

Eventually, the flow should terminate and return something similar to:

+
[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}]
+
+
+

Finally, it provides the correct answer!

+

Nevertheless, persisting with the use of ReActWithHumanFeedback may reveal an inherent challenge, particularly in prolonged conversations. The primary issue arises when attempting to pass the entire message history to the language model (LLM), eventually surpassing the maximum token limit allowable. As a workaround, we currently send only the first two and the last messages as context to the LLM. However, this approach is suboptimal if you desire your model to maintain a more comprehensive long-term memory.

+

To address this limitation, we recommend exploring the subsequent tutorial, AutoGPT Tutorial. This tutorial introduces a fundamental implementation of AutoGPT, enhancing the ReAct flow by incorporating a Memory Flow. This addition tackles the challenge of managing longer conversations.

+
+

Next Tutorial: AutoGPT Tutorial

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/setting_up_aiFlows.html b/docs/built_with_sphinx/html/getting_started/Tutorial/setting_up_aiFlows.html new file mode 100644 index 0000000..7fab0ac --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/setting_up_aiFlows.html @@ -0,0 +1,277 @@ + + + + + + + + + +Setting up aiFlows | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Setting up aiFlows

+

Welcome to a straightforward tutorial in which we walk you through a suggested setup that will provide you with a smooth and efficient workflow.

+

Let’s dive right in. This document is a tutorial for setting up the following:

+
    +
  1. Section 1: Installing aiFlows

  2. +
  3. Section 2: Setting Up The FlowVerse

  4. +
  5. Section 3: Setting Up Your API Keys

  6. +
+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Installed the aiFlows library successfully

  • +
  • Established an organized file structure for seamless collaboration within the FlowVerse

  • +
  • Set up a Hugging Face account for contribution to the FlowVerse (Optional)

  • +
  • Configured and activated my API keys

  • +
+
+
+

Section 1: Installing aiFlows

+

Begin the installation process for aiFlows with Python 3.10+ using:

+
pip install aiflows
+
+
+

Alternatively, for a manual installation:

+
git clone https://github.com/epfl-dlab/aiflows.git
+cd aiflows
+conda create --name flows python=3.10
+conda activate flows
+pip install -e .
+
+
+
+
+

Section 2: Setting Up The FlowVerse

+
+

Step 1: Setting up efficient Folder Structure

+

Create a dedicated folder for the FlowVerse, following our recommended structure:

+
mkdir FlowVerse
+
+
+

Following the download of your initial Flows from the FlowVerse, your folder arrangement should look like this:

+
|-- YourProject
+|-- flow_modules
+|      |-- Flow1
+|      |-- Flow2
+|      |-- ...
+
+
+

This ensures all your Flows are conveniently centralized in a single place, simplifying management.

+
+
+

Step 2: Optional - Linking Hugging Face for FlowVerse Push

+

To facilitate FlowVerse pushing, it’s essential to link your Hugging Face account:

+
    +
  1. Begin by creating a Hugging Face account at huggingface and verify your email.

  2. +
  3. Log in to Hugging Face in the terminal using:

    +
      +
  • For terminal login, you’ll need an access token. If you haven’t already, create one (a read token should be sufficient)

    • +
    • Enter the following command in the terminal, and when prompted, paste your access token:

      +
      huggingface-cli login
      +
      +
      +
    • +
    +
  4. +
+

This process is essential for the smooth integration of Hugging Face with FlowVerse, ensuring effortless pushing.

+
+
+
+

Section 3: Setting Up Your API Keys

+

In this final step, let’s configure your API keys as environment variables for your conda environment. We’ll demonstrate how to set up keys for both OpenAI and Azure. Note that, thanks to LiteLLM, a variety of providers are available—explore them here: https://docs.litellm.ai/docs/providers

+
    +
  • If you’re using openAI:

    +
      +
    • write in your terminal:

      +
      conda env config vars set OPENAI_API_KEY=<YOUR-OPEN-AI-API_KEY>
      +
      +
      +
    • +
    • reactivate your conda environment:

      +
      conda activate <NAME_OF_YOUR_ENVIRONMENT>
      +
      +
      +
    • +
    • To make sure that your key has been set as an environment variable (your environment variables should appear):

      +
      conda env config vars list
      +
      +
      +
    • +
    +
  • +
  • If you’re using Azure:

    +
      +
    • write in your terminal:

      +
      conda env config vars set AZURE_OPENAI_KEY=<YOUR-AZURE_OPENAI_KEY>
      +conda env config vars set AZURE_API_BASE=<YOUR-AZURE_API_BASE>
      +conda env config vars set AZURE_API_VERSION=<YOUR-AZURE_API_VERSION>
      +
      +
      +
    • +
    • reactivate your conda environment:

      +
      conda activate <NAME_OF_YOUR_ENVIRONMENT>
      +
      +
      +
    • +
    • To make sure that your key has been set as an environment variable (your environment variables should appear):

      +
      conda env config vars list
      +
      +
      +
    • +
    +
  • +
+

Congratulations! You are now equipped to seamlessly work with aiFlows. Happy flowing!

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/Tutorial/tutorial_landing_page.html b/docs/built_with_sphinx/html/getting_started/Tutorial/tutorial_landing_page.html new file mode 100644 index 0000000..d67616b --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/Tutorial/tutorial_landing_page.html @@ -0,0 +1,268 @@ + + + + + + + + + +Tutorials | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Tutorials

+

Welcome to the exciting world of aiFlows! 🚀 These tutorials are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, we recommend following the tutorials in the given order. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path.

+

Get ready for an engaging journey where you’ll build practical skills and gain a deeper understanding of the power and versatility of aiFlows.

+

Let’s dive in and explore the following tutorials !

+
+

1. Setting up aiFlows

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Installed the aiFlows library successfully

  • +
  • Established an organized file structure for seamless collaboration within the FlowVerse

  • +
  • Set up a Hugging Face account for contribution to the FlowVerse (Optional)

  • +
  • Configured and activated my API keys

  • +
+
+
+
+

2. Atomic Flow Tutorial

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained insight into the relationship among a Flow, an input interface, and an output interface

  • +
  • Acquired hands-on experience in creating an AtomicFlow with the example of ReverseNumberAtomic

  • +
  • Learned how to run a flow with a FlowLauncher

  • +
+
+
+
+

3. Composite Flow Tutorial

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained insights into the concept of a Composite Flow

  • +
  • Acquired the skills to create a SequentialFlow through a toy example

  • +
  • Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure

  • +
+
+
+
+

4. Introducing the FlowVerse with a Simple Q&A Flow

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained an understanding of the FlowVerse and its significance

  • +
  • Acquired the skills to retrieve flows from the FlowVerse

  • +
  • Successfully developed my initial flow by incorporating a FlowVerse flow

  • +
  • Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API

  • +
  • Familiarized myself with the fundamental parameters of the ChatAtomicFlow

  • +
+
+
+
+

5. ReAct Tutorial

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained an understanding of the ReAct flow and its significance

  • +
  • Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies

  • +
  • Successfully developed my first personalized ReAct flow

  • +
  • Familiarized myself with the essential parameters of the ControllerExecutorFlow

  • +
+
+
+
+

6. ReAct With Human Feedback Tutorial

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences

  • +
  • Learned how to integrate a human feedback flow into ReAct

  • +
  • Incorporated customized functions into the input and output interfaces.

  • +
  • Grasped the limitations of ReAct, particularly its lack of long-term memory

  • +
  • Deepened my understanding of the key parameters in the ControllerExecutorFlow configuration

  • +
+
+
+
+

7. AutoGPT Tutorial

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications

  • +
  • Gained proficiency in executing the AutoGPTFlow

  • +
  • Enhanced comprehension of intricate flow structures

  • +
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/detailed_examples/autogpt.html b/docs/built_with_sphinx/html/getting_started/detailed_examples/autogpt.html new file mode 100644 index 0000000..5ea85df --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/detailed_examples/autogpt.html @@ -0,0 +1,477 @@ + + + + + + + + + +AutoGPT | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

AutoGPT

+
+

Definition

+

The AutoGPT flow is a circular flow that organizes the problem-solving process into four distinct subflows:

+
    +
  1. ControllerFlow: Given a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the ExecutorFlow, the next action it should execute to get closer to accomplishing its goal. In our configuration, we implement the ControllerFlow using the ChatAtomicFlow

  2. +
  3. ExecutorFlow: Following the action selection by the ControllerFlow, the process moves to the ExecutorFlow. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The ExecutorFlow executes the particular subflow associated with the action chosen by the ControllerFlow. In our setup, the ExecutorFlow includes the following individual flows:

    +
      +
    • WikiSearchAtomicFlow: This flow, given a “search term,” executes a Wikipedia search and returns content related to the search term.

    • +
    • LCToolFlow using DuckDuckGoSearchRun: This flow, given a “query,” queries the DuckDuckGo search API and retrieves content related to the query.

    • +
    +
  4. +
  5. HumanFeedbackFlow: This flow prompts the user for feedback on the latest execution of the ExecutorFlow. The collected feedback is then conveyed back to the ControllerFlow to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the ReActWithHumanFeedbackFlow if the user expresses such a preference.

  6. +
  7. MemoryFlow: This flow is used to read and write memories of past conversations stored in a database. These memories can be passed to the ControllerFlow, enabling it to have a long-term memory without having to transmit the entire message history to the language model (LLM). It’s implemented with the VectorStoreFlow

  8. +
+
+
+

Topology

+

The sequence of execution for AutoGPT’s flows is circular and follows this specific order:

+
    +
  1. The MemoryFlow retrieves relevant information from memory

  2. +
  3. The ControllerFlow selects the next action to execute and prepares the input for the ExecutorFlow

  4. +
  5. The ExecutorFlow executes the action specified by the ControllerFlow

  6. +
  7. The HumanFeedbackFlow asks the user for feedback

  8. +
  9. The MemoryFlow writes relevant information to memory

  10. +
+

Here’s a broad overview of the AutoGPTFlow:

+
| -------> Memory Flow -------> Controller Flow ------->|
+^                                                       |      
+|                                                       |
+|                                                       v
+| <----- HumanFeedback Flow <------- Executor Flow <----|
+
+
+
+
+

Subflows

+
+

Memory Flow

+

We utilize the ChromaDBFlow from the VectorStoreFlowModule as the MemoryFlow. For a detailed understanding of its parameters, refer to its FlowCard for an extensive description of its parameters.

+

Like every flow, ChromaDBFlow is invoked via its run function, which is called as follows:

+
def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
+        """ This method runs the flow. It runs the ChromaDBFlow. It either writes or reads memories from the database.
+        
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        :return: The output data of the flow.
+        :rtype: Dict[str, Any]
+        """
+        api_information = self.backend.get_key()
+
+        if api_information.backend_used == "openai":
+            embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key)
+        else:
+            # ToDo: Add support for Azure
+            embeddings = OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY"))
+        response = {}
+
+        operation = input_data["operation"]
+        if operation not in ["write", "read"]:
+            raise ValueError(f"Operation '{operation}' not supported")
+
+        content = input_data["content"]
+        if operation == "read":
+            if not isinstance(content, str):
+                raise ValueError(f"content(query) must be a string during read, got {type(content)}: {content}")
+            if content == "":
+                response["retrieved"] = [[""]]
+                return response
+            query = content
+            query_result = self.collection.query(
+                query_embeddings=embeddings.embed_query(query),
+                n_results=self.flow_config["n_results"]
+            )
+
+            response["retrieved"] = [doc for doc in query_result["documents"]]
+
+        elif operation == "write":
+            if content != "":
+                if not isinstance(content, list):
+                    content = [content]
+                documents = content
+                self.collection.add(
+                    ids=[str(uuid.uuid4()) for _ in range(len(documents))],
+                    embeddings=embeddings.embed_documents(documents),
+                    documents=documents
+                )
+            response["retrieved"] = ""
+
+        return response
+
+
+

One can notice that ChromaDBFlow acts as an encapsulation for chromadb’s vector store-backed memory, which offers support for two types of operations:

+
    +
  • read: This operation involves retrieving the n_results most relevant documents from ChromaDB based on the provided content.

  • +
  • write: This operation is utilised to add the given content to VectorDB.

  • +
+
+

Additional Documentation:

+ +
+
+
+

ControllerFlow

+

We utilize the ControllerAtomicFlow from the ControllerExecutorFlowModule as the ControllerFlow. For a detailed understanding of its parameters, refer to its FlowCard for an extensive description of its parameters.

+

ControllerAtomicFlow’s run function looks like this:

+
def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
+        """ This method runs the flow. Note that the response of the LLM is in the JSON format, but it's not a hard constraint (it can hallucinate and return an invalid JSON)
+        
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        :return: The output data of the flow (thought, reasoning, criticism, command, command_args)
+        :rtype: Dict[str, Any]
+        """
+        api_output = super().run(input_data)["api_output"].strip()
+        response = json.loads(api_output)
+        return response
+
+
+

The run function is a straightforward wrapper around ChatAtomicFlow. The Language Model (LLM) responds in JSON format, but this isn’t strictly enforced—it may occasionally return an invalid JSON. The soft constraint is set by the system prompt, detailed in its default configuration. This configuration specifies the expected output format and describes the available commands it has access to (these are the subflows of the ExecutorFlow). The system prompt template is as follows:

+
system_message_prompt_template:
+  _target_: aiflows.prompt_template.JinjaPrompt
+  template: |2-
+    You are a smart AI assistant. 
+    
+    Your decisions must always be made independently without seeking user assistance.
+    Play to your strengths as an LLM and pursue simple strategies with no legal complications.
+    If you have completed all your tasks, make sure to use the "finish" command.
+
+    Constraints:
+    1. No user assistance
+    2. Exclusively use the commands listed in double quotes e.g. "command name"
+
+    Available commands:
+    {{commands}}
+
+    Resources:
+    1. Internet access for searches and information gathering.
+    2. Long Term memory management.
+
+    Performance Evaluation:
+    1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.
+    2. Constructively self-criticize your big-picture behavior constantly.
+    3. Reflect on past decisions and strategies to refine your approach.
+    4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.
+    You should only respond in JSON format as described below
+    Response Format:
+    {
+    "thought": "thought",
+    "reasoning": "reasoning",
+    "plan": "- short bulleted\n- list that conveys\n- long-term plan",
+    "criticism": "constructive self-criticism",
+    "speak": "thoughts summary to say to user",
+    "command": "command name",
+    "command_args": {
+        "arg name": "value"
+        }
+    }
+    Ensure your responses can be parsed by Python json.loads
+input_variables: ["commands"]
+
+
+

Where “{{commands}}” is the placeholder for the available commands which are added to the template when the ControllerAtomicFlow is being instantiated.

+

The goal and observations (from past executions) are passed via the human_message_prompt and the init_human_message_prompt, which are the following:

+
human_message_prompt_template:
+    template: |2
+    Potentially relevant information retrieved from your memory:
+    {{memory}}
+    =================
+    Here is the response to your last action:
+    {{observation}}
+    Here is the feedback from the user:
+    {{human_feedback}}
+    input_variables:
+    - "observation"
+    - "human_feedback"
+    - "memory"
+input_interface_initialized:
+    - "observation"
+    - "human_feedback"
+    - "memory"
+
+
+
+

Additional Documentation:

+ +
+
+
+

ExecutorFlow

+

We utilize a BranchingFlow from aiFlow’s codebase as the ExecutorFlow. The ExecutorFlow by default has two subflows which are the available commands the ControllerFlow can call:

+
+

1. The LCToolFlow

+

The LCToolFlow is an atomic flow functioning as an interface for LangChain tools. This flow operates by taking a tool_input, which corresponds to the tool’s keyword arguments, as its input, and then provides the observation as its output.

+
 def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
+    """ This method runs the flow. It runs the backend on the input data.
+    
+    :param input_data: The input data of the flow.
+    :type input_data: Dict[str, Any]
+    :return: The output data of the flow.
+    :rtype: Dict[str, Any]
+    """
+    observation = self.backend.run(tool_input=input_data)
+
+    return {"observation": observation}
+
+
+

Using a tool with the LCToolFlow is a straightforward process. By setting the desired tool as the backend’s _target_, you can seamlessly integrate it into your workflow. For a comprehensive list of compatible tools, please refer to the Integrations section in LangChain’s Tool documentation.

+
- _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config
+  overrides:
+    name: "ddg_search"
+    backend:
+      _target_: langchain.tools.DuckDuckGoSearchRun
+
+
+
+
+

2. The WikiSearchAtomicFlow

+

The WikiSearchAtomicFlow is also an atomic flow and functions as an interface for Wikipedia’s API. Given a search_term, it can execute a search on Wikipedia and fetch page summaries to eventually pass them back to the ControllerFlow

+
def run(self,
+            input_data: Dict[str, Any]) -> Dict[str, Any]:
+        """ Runs the WikiSearch Atomic Flow. It's used to execute a Wikipedia search and get page summaries.
+        
+        :param input_data: The input data dictionary
+        :type input_data: Dict[str, Any]
+        :return: The output data dictionary
+        :rtype: Dict[str, Any]
+        """
+
+        # ~~~ Process input ~~~
+        term = input_data.get("search_term", None)
+        api_wrapper = WikipediaAPIWrapper(
+            lang=self.flow_config["lang"],
+            top_k_results=self.flow_config["top_k_results"],
+            doc_content_chars_max=self.flow_config["doc_content_chars_max"]
+        )
+
+        # ~~~ Call ~~~
+        if page_content := api_wrapper._fetch_page(term):
+            search_response = {"wiki_content": page_content, "relevant_pages": None}
+        else:
+            page_titles = api_wrapper.search_page_titles(term)
+            search_response = {"wiki_content": None, "relevant_pages": f"Could not find [{term}]. similar: {page_titles}"}
+
+        # Log the update to the flow messages list
+        observation = search_response["wiki_content"] if search_response["wiki_content"] else search_response["relevant_pages"]
+        return {"wiki_content": observation}
+
+
+
+
+

Additional Documentation:

+ +
+
+
+

Human Feedback Flow

+

We utilize the HumanStandardInputFlow from the HumanStandardInputFlowModule as the HumanFeedbackFlow. For a detailed understanding of its parameters, refer to its FlowCard for an extensive description of its parameters.

+

Its run function enables users to provide feedback at the conclusion of each iteration. This feedback is subsequently appended to the observation generated by the ExecutorFlow. By doing so, the feedback becomes part of the memory, thereby influencing the agent’s decision-making process.

+
def run(self,
+            input_data: Dict[str, Any]) -> Dict[str, Any]:
+        """ Runs the HumanStandardInputFlow. It's used to read input from the user/human's standard input.
+        
+        :param input_data: The input data dictionary
+        :type input_data: Dict[str, Any]
+        :return: The output data dictionary
+        :rtype: Dict[str, Any]
+        """
+
+        query_message = self._get_message(self.query_message_prompt_template, input_data)
+        state_update_message = UpdateMessage_Generic(
+            created_by=self.flow_config['name'],
+            updated_flow=self.flow_config["name"],
+            data={"query_message": query_message},
+        )
+        self._log_message(state_update_message)
+
+        log.info(query_message)
+        human_input = self._read_input()
+
+        return {"human_input": human_input}
+
+
+

In the current context, if the user enters the command q, the flow triggers an early exit by setting the early exit key to True, which leads to the termination of the Flow.

+
+

Additional Documentation:

+ +
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/detailed_examples/chat_flow.html b/docs/built_with_sphinx/html/getting_started/detailed_examples/chat_flow.html new file mode 100644 index 0000000..74f06a7 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/detailed_examples/chat_flow.html @@ -0,0 +1,241 @@ + + + + + + + + + +ChatAtomicFlow | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

ChatAtomicFlow

+
+

Definition

+

The ChatAtomicFlow is a flow that seamlessly interfaces with an LLM through an API, generating textual responses for textual inputs. Powered by the LiteLLM library in the backend, ChatAtomicFlow supports various API providers; explore the full list here. For a detailed understanding of its parameters, refer to its FlowCard for an extensive description of its parameters.

+
+
+

Methods

+

In this section, we’ll explore some of ChatAtomicFlow’s methods, specifically those invoked when it is called.

+

Just like every flow, ChatAtomicFlow is called via the run method:

+
def run(self,input_data: Dict[str, Any]):
+        """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow.
+        
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        :return: The LLM's api output.
+        :rtype: Dict[str, Any]
+        """
+        
+        # ~~~ Process input ~~~
+        self._process_input(input_data)
+
+        # ~~~ Call ~~~
+        response = self._call()
+        
+        #loop is in case there was more than one answer (n>1 in generation parameters)
+        for answer in response:
+            self._state_update_add_chat_message(
+                role=self.flow_config["assistant_name"],
+                content=answer
+            )
+        response = response if len(response) > 1 or len(response) == 0 else response[0]
+        return {"api_output": response}
+
+
+

As you can see in the code snippet here above, run processes the input data of the flow via the _process_input method. Let’s take a closer look at what it does:

+
def _process_input(self, input_data: Dict[str, Any]):
+        """ This method processes the input of the flow. It adds the human message to the flow's state. If the conversation is not initialized, it also initializes it
+        (adding the system message and potentially the demonstrations).
+        
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        """
+        if self._is_conversation_initialized():
+            # Construct the message using the human message prompt template
+            user_message_content = self._get_message(self.human_message_prompt_template, input_data)
+
+        else:
+            # Initialize the conversation (add the system message, and potentially the demonstrations)
+            self._initialize_conversation(input_data)
+            if getattr(self, "init_human_message_prompt_template", None) is not None:
+                # Construct the message using the query message prompt template
+                user_message_content = self._get_message(self.init_human_message_prompt_template, input_data)
+            else:
+                user_message_content = self._get_message(self.human_message_prompt_template, input_data)
+
+        self._state_update_add_chat_message(role=self.flow_config["user_name"],
+                                            content=user_message_content)
+
+
+

This function prepares the user message prompt for submission to the Language Model (LLM) by inserting the input_data into the placeholders of the user prompt template (details of which will be explained later). The choice of user prompt sent to the LLM depends on whether the conversation has been initiated or not (i.e., whether the flow has been called):

+
    +
  • If the conversation has not been initialized, the message is constructed using the init_human_message_prompt_template. In this case, the expected input interface for the flow is specified by input_interface_non_initialized.

  • +
  • If the conversation has been initialized, the message is constructed using the human_message_prompt_template. In this case, the expected input interface for the flow is specified by input_interface_initialized.

  • +
+

This distinction proves useful when different inputs are needed for the initial query compared to subsequent queries to the flow. For example, in ReAct, the first query to the LLM is initiated by a human, such as asking a question. In subsequent queries, the input is derived from the execution of a tool (e.g., a query to Wikipedia). In ReAct’s implementation, these two scenarios are differentiated by ChatAtomicFlow’s input_interface_non_initialized and input_interface_initialized, which define the input interface for the flow.

+

ChatAtomicFlow’s default configuration defines user prompt templates as so:

+
init_human_message_prompt_template:
+  _target_: aiflows.prompt_template.JinjaPrompt
+
+human_message_prompt_template:
+  _target_: aiflows.prompt_template.JinjaPrompt
+  template: "{{query}}"
+  input_variables:
+    - "query"
+input_interface_initialized:
+  - "query"
+
+
+

This signifies that init_human_message_prompt_template represents an empty string message, while the rendered message for human_message_prompt_template is derived from the previous flow’s query. This is achieved by placing the input variable “query” (from input_dict) into the {{query}} placeholder of the prompt template.

+

Finally, the run function calls the LLM via the LiteLLM library, saves the message in its flow state and sends the output to the next flow.

+

Additional Documentation:

+ +
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/detailed_examples/detailed_example_landing_page.html b/docs/built_with_sphinx/html/getting_started/detailed_examples/detailed_example_landing_page.html new file mode 100644 index 0000000..517bd86 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/detailed_examples/detailed_example_landing_page.html @@ -0,0 +1,204 @@ + + + + + + + + + +Detailed Examples | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Detailed Examples

+

Welcome to the exciting world of aiFlows! 🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, we recommend following the guides in the given order. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path.

+

Get ready for an engaging journey where you’ll build practical skills and gain a deeper understanding of the power and versatility of aiFlows.

+

Let’s dive in and explore the following guides!

+
+

1. ChatAtomicFlow

+
+

By the Guide’s End, I Will Have…

+
    +
  • Gained insight into the execution flow of ChatAtomicFlow

  • +
  • Acquired an understanding of how ChatAtomicFlow processes inputs

  • +
  • Identified the documentation resource for ChatAtomicFlow

  • +
+
+
+
+

2. VisionAtomicFlow

+
+

By the Guide’s End, I Will Have…

+
    +
  • Gained insight into the execution flow of VisionAtomicFlow

  • +
  • Acquired an understanding of how VisionAtomicFlow processes inputs

  • +
  • Recognized the similarities between VisionAtomicFlow and ChatAtomicFlow

  • +
  • Identified the documentation resource for VisionAtomicFlow

  • +
+
+
+
+

3. AutoGPTFlow

+

Note: This guide is also useful if you’re interested in ReAct since the two share lots of similarities

+
+

By the Guide’s End, I Will Have…

+
    +
  • Understood the purpose of AutoGPTFlow

  • +
  • Explored the functionalities of AutoGPT’s subflows

  • +
  • Identified the documentation resource for AutoGPTFlow and its subflows

  • +
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/detailed_examples/vision_flow.html b/docs/built_with_sphinx/html/getting_started/detailed_examples/vision_flow.html new file mode 100644 index 0000000..50c772a --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/detailed_examples/vision_flow.html @@ -0,0 +1,251 @@ + + + + + + + + + +Vision Atomic Flow | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Vision Atomic Flow

+

Prerequisite: Chat Atomic Flow

+
+

Definition

+

The VisionAtomicFlow is a flow that seamlessly interfaces with an LLM through an API. Given a textual input and a set of images and/or videos, it generates a textual output. Powered by the LiteLLM library in the backend, VisionAtomicFlow supports various API providers; explore the full list here. For an extensive description of its parameters, refer to its FlowCard.

+
+
+

Methods

+

In this section, we’ll delve into some of the methods within the VisionAtomicFlow class, specifically those invoked when it is called.

+

If you examine the VisionAtomicFlow class, you’ll observe the following:

+
    +
  1. It’s a class that inherits from the ChatAtomicFlow.

  2. +
  3. There is no run method explicitly defined, and as a result, it shares the same run method as ChatAtomicFlow, which is the method always called when a flow is invoked.

  4. +
+

Here is the run method of VisionAtomicFlow:

+
def run(self,input_data: Dict[str, Any]):
+        """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow.
+        
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        :return: The LLM's api output.
+        :rtype: Dict[str, Any]
+        """
+        
+        # ~~~ Process input ~~~
+        self._process_input(input_data)
+
+        # ~~~ Call ~~~
+        response = self._call()
+        
+        #loop is in case there was more than one answer (n>1 in generation parameters)
+        for answer in response:
+            self._state_update_add_chat_message(
+                role=self.flow_config["assistant_name"],
+                content=answer
+            )
+        response = response if len(response) > 1 or len(response) == 0 else response[0]
+        return {"api_output": response}
+
+
+

In the provided code snippet, observe that the run method handles the input data of the flow through the _process_input method. Let’s delve into a closer examination of its functionality:

+
def _process_input(self, input_data: Dict[str, Any]):
+        """ This method processes the input data (prepares the messages to send to the API).
+        
+        :param input_data: The input data.
+        :type input_data: Dict[str, Any]
+        :return: The processed input data.
+        :rtype: Dict[str, Any]
+        """
+        if self._is_conversation_initialized():
+            # Construct the message using the human message prompt template
+            user_message_content = self.get_user_message(self.human_message_prompt_template, input_data)
+
+        else:
+            # Initialize the conversation (add the system message, and potentially the demonstrations)
+            self._initialize_conversation(input_data)
+            if getattr(self, "init_human_message_prompt_template", None) is not None:
+                # Construct the message using the query message prompt template
+                user_message_content = self.get_user_message(self.init_human_message_prompt_template, input_data)
+            else:
+                user_message_content = self.get_user_message(self.human_message_prompt_template, input_data)
+
+        self._state_update_add_chat_message(role=self.flow_config["user_name"],
+                                            content=user_message_content)
+
+
+

When calling _process_input(input_data) in VisionAtomicFlow, the flow generates its user message prompt similarly to ChatAtomicFlow (refer to ChatAtomicFlow’s detailed example). However, due to a slight modification in the get_user_message method compared to ChatAtomicFlow, it also includes one or multiple images or videos in the input.

+
 @staticmethod
+    def get_user_message(prompt_template, input_data: Dict[str, Any]):
+        """ This method constructs the user message to be passed to the API.
+        
+        :param prompt_template: The prompt template to use.
+        :type prompt_template: PromptTemplate
+        :param input_data: The input data.
+        :type input_data: Dict[str, Any]
+        :return: The constructed user message (images , videos and text).
+        :rtype: Dict[str, Any]
+        """
+        content = VisionAtomicFlow._get_message(prompt_template=prompt_template,input_data=input_data)
+        media_data = input_data["data"]
+        if "video" in media_data:
+            content = [ content[0], *VisionAtomicFlow.get_video(media_data["video"])]
+        if "images" in media_data:
+            images = [VisionAtomicFlow.get_image(image) for image in media_data["images"]]
+            content.extend(images)
+        return content
+
+
+

Note that images can be passed either via a URL (an image on the internet) or by providing the path to a local image. However, videos must be local videos.

+

Finally, the run function calls the LLM via the LiteLLM library, saves the message in its flow state and sends the textual output to the next flow.

+

Additional Documentation:

+ +
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/developer_guide/developper_guide_landing_page.html b/docs/built_with_sphinx/html/getting_started/developer_guide/developper_guide_landing_page.html new file mode 100644 index 0000000..ef52f77 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/developer_guide/developper_guide_landing_page.html @@ -0,0 +1,187 @@ + + + + + + + + + +Developer’s Guide | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Developer’s Guide

+

Welcome to the exciting world of aiFlows! 🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, we recommend following the tutorials in the given order. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path.

+

Get ready for an engaging journey where you’ll build practical skills and gain a deeper understanding of the power and versatility of aiFlows.

+

Let’s dive in and explore the following guides!

+
+

1. Flow Module Management

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained a clear understanding of pulling flows from the FlowVerse.

  • +
  • Mastered the handling of flows that depend on other flows.

  • +
+
+
+
+

2. Typical Developer Workflows

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Learned how to Create a Flow

  • +
  • Learned how to Test a Flow

  • +
  • Learned how to Publish a Flow

  • +
  • Learned how to contribute to an existing flow

  • +
+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/developer_guide/flow_module_management.html b/docs/built_with_sphinx/html/getting_started/developer_guide/flow_module_management.html new file mode 100644 index 0000000..35f20d3 --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/developer_guide/flow_module_management.html @@ -0,0 +1,239 @@ + + + + + + + + + +Flow Module Management | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Flow Module Management

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Gained a clear understanding of pulling flows from the FlowVerse.

  • +
  • Mastered the handling of flows that depend on other flows.

  • +
+
+
+

Introduction

+

The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, these Flows can be readily downloaded, used, extended or composed into novel, more complex Flows. For the ones using ChatGPT, you could think of them as open-source GPTs(++).

+

In the heart of this platform, the community shares their unique Flows, encapsulated in what we call flow modules.

+
+
+

Flow Modules

+ +
+
+

Syncing Flow Modules

+

To use or import a flow module, first sync it to the flow_modules directory in your root directory. You can then import it like any local Python package. Consider the following trivial_sync_demo.py, which relies on nbaldwin/ChatInteractiveFlowModule:

+
dependencies = [
+    {"url": "nbaldwin/ChatInteractiveFlowModule", "revision": "main"},
+]
+from aiflows import flow_verse
+flow_verse.sync_dependencies(dependencies)
+
+from flow_modules.nbaldwin.ChatInteractiveFlowModule import ChatHumanFlowModule
+
+if __name__ == "__main__":
+	print("This is a trivial sync demo.")
+
+
+

This synchronization process, though it may seem unconventional at first, provides a number of advantages:

+
    +
  • The synchronization process examines the implementation of remote flow modules seamlessly, eliminating the need to switch between your integrated development environment (IDE) and a web page.

  • +
  • It extends existing implementations effortlessly without the requirement to download or clone the repository manually.

  • +
+
+
+

Flow Module Namespace

+
    +
  • Remote flow modules are identified by their Hugging Face repository ID and revision, such as nbaldwin/ChatInteractiveFlowModule:main.

  • +
  • Each locally synchronized flow module manifests as a valid Python package within the flow_modules directory, exemplified by structures like flow_modules.nbaldwin.ChatInteractiveFlowModule. Importantly, only one revision is retained for each remote flow module, a practice upheld to ensure clarity and manage revision conflicts. Should a conflict arise, a warning will guide you to select the preferred version.

  • +
+

For a visual representation, consider the following directory structure:

+
(aiflows)   dev-tutorial tree .
+.
+├── flow_modules
+│   ├── aiflows
+│      └── ChatFlowModule
+│          ├── ...
+│          ├── ChatAtomicFlow.py
+│          ├── ChatAtomicFlow.yaml
+│          ├── ...
+│          ├── ...
+│          └── __pycache__
+│              ├── ChatAtomicFlow.cpython-39.pyc
+│              └── __init__.cpython-39.pyc
+│   └── nbaldwin
+│       └── ChatInteractiveFlowModule
+│           ├── ...
+│           ├── ChatHumanFlowModule.py
+│           ├── ChatHumanFlowModule.yaml
+│           ├── README.md
+│           ├── ...
+│           └── __pycache__
+│               ├── ChatHumanFlowModule.cpython-39.pyc
+│               └── __init__.cpython-39.pyc
+└── trivial_sync_demo.py
+
+9 directories, 16 files
+
+
+

In this illustration, the nbaldwin/ChatInteractiveFlowModule flow module relies on the remote flow module aiflows/ChatAtomicFlow. Both dependencies are seamlessly synchronized under the flow_modules directory. The synchronization and importation of dependencies mirror each other, ensuring a consistent and logical approach across remote and local development environments.

+
+

Next Tutorial: Typical Developer Workflows

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/developer_guide/typical_developer_workflows.html b/docs/built_with_sphinx/html/getting_started/developer_guide/typical_developer_workflows.html new file mode 100644 index 0000000..cce3dec --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/developer_guide/typical_developer_workflows.html @@ -0,0 +1,375 @@ + + + + + + + + + +Typical Developer Workflows | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Typical Developer Workflows

+

prerequisites: Flow Module Management

+
+

Creating, Testing, and Publishing Your Own Flow Module

+
+

By the Tutorial’s End, I Will Have…

+
    +
  • Learned how to Create a Flow

  • +
  • Learned how to Test a Flow

  • +
  • Learned how to Publish a Flow

  • +
  • Learned how to contribute to an existing flow

  • +
+
+
+

Creating Your Own Flow Module

+

To start, create a local directory where you’ll develop your flow module:

+
(aiflows)   dev-tutorial mkdir PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots
+(aiflows)   dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots
+(aiflows)   dev_UsefulChatBots touch __init__.py
+(aiflows)   dev_UsefulChatBots touch .gitignore
+(aiflows)   dev_UsefulChatBots touch EconomicExpertBot.py
+(aiflows)   dev_UsefulChatBots git init
+(aiflows)   dev_UsefulChatBots git:(main)  git add .
+(aiflows)   dev_UsefulChatBots git:(main)  git commit -m "initial commit"
+[main (root-commit) e592fd1] initial commit
+3 files changed, 0 insertions(+), 0 deletions(-)
+create mode 100644 .gitignore
+create mode 100644 EconomicExpertBot.py
+create mode 100644 __init__.py
+
+
+

Next, we could either develop from scratch as in Tutorial for AtomicFlow or we could leverage an existing flow module and build upon it. In this tutorial, we’ll develop our chatbot based on aiflows/ChatFlowModule thanks to the modularity of Flows:

+
dependencies = [
+   {"url": "aiflows/ChatFlowModule", "revision": "main"},
+]
+from aiflows import flow_verse
+flow_verse.sync_dependencies(dependencies)
+
+from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow
+
+class EconomicExpertBot(ChatAtomicFlow):
+   def __init__(self, **kwargs):
+	   super().__init__(**kwargs)
+
+
+

We recommend associating your flow with a default YAML file as its default config. This default config will serve as a clear spec of the Flow class. For example, in our case:

+
name: "EconomicExpertBot"
+description: "A chatbot which answers questions about the economy."
+
+input_interface:
+ - "query"
+
+output_interface:
+ - "response"
+
+system_message_prompt_template:
+ _target_: aiflows.prompt_template.JinjaPrompt
+ template: |2-
+   You are an expertise in finance, economy and investment. When you explain something, you always provide associated statistical numbers, source of the information and concrete examples. You tend to explain things in a step-by-step fashion to help the reader to understand. You are also proficient in both English and Chinese. You can answer questions fluently in both languages.
+
+ input_variables: []
+
+
+

This explicitly informs potential users about the input_interface and output_interface, which can be seen as the interface of our Flow. Since we’re inheriting from aiflows/ChatFlowModule.ChatAtomicFlow, we also inherit the default config from it. Therefore, our default config can be succinct and only needs to tweak some essential parameters.

+

Note that a flow module should ideally be a self-contained python module. Therefore, it’s best to use relative import inside your code such that other users can use your flow instantly.

+
+
+

Testing Your Own Flow Module

+

So far so good, we have created our own flow. Let’s now try to test it:

+
dependencies = [
+    {"url": "yeeef/UsefulChatBots", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots"},
+]
+from aiflows import flow_verse
+flow_verse.sync_dependencies(dependencies)
+
+import os
+
+from flow_modules.yeeef.UsefulChatBots.EconomicExpertBot import EconomicExpertBot
+from aiflows.flow_launchers import FlowLauncher
+
+
+if __name__ == "__main__":
+    # ~~~ Set the API information ~~~
+    # OpenAI backend
+
+    api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
+
+    overrides = { "backend": {"api_infos": api_information}}
+
+    bot = EconomicExpertBot.instantiate_from_default_config(**overrides)
+    # the data points in inputs must satisfy the requirements of input_keys
+    data = [
+        {
+            "id": 0, "query": "What is CPI? What is the current CPI in the US?",
+        },
+    ]
+    print(f"inputs: {data}")
+
+    # init a minimal flow_launcher without specifying the output_keys, then
+    # the full output_keys will be given
+    outputs = FlowLauncher.launch(
+        flow_with_interfaces={"flow": bot},
+        data=data,
+    )
+    print(outputs)
+
+
+

As we are developing locally, the remote revision does not exist yet, so we point the revision to the local path we just created: PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots. Note that when we sync a local revision, instead of copying the files locally, we make a symbolic soft link. So you could just modify the code under flow_modules and the changes will be automatically propagated to the PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots.

+

We also specify the namespace of our flow module: yeeef/UsefulChatBots. yeeef is my HuggingFace username, and you should replace it with your own Hugging Face username. Note that this url could be arbitrary as it does not exist online yet, but we highly recommend that the namespace of the flow module be consistent with your HuggingFace username, such that publishing it later will be seamless.

+

Then let’s execute the code and test our new flow:

+
(aiflows) ➜  dev-tutorial python ask_economic_expert_bot.py
+inputs: [{'id': 0, 'query': 'What is CPI? What is the current CPI in the US?'}]
+[2023-07-05 17:05:35,530][aiflows.base_flows.abstract][WARNING] - The raw response was not logged.
+[{'id': 0, 'inference_outputs': [OutputMessage(message_id='d95683d6-9507-4a90-b290-6a43e609c904', created_at='2023-07-05 09:05:35.530972000', created_by='EconomicExpertBot', message_type='OutputMessage', data={'output_keys': ['response'], 'output_data': {'response': 'CPI, or the Consumer Price Index, is a measure that examines the weighted average of prices of a basket of consumer goods and services, such as transportation, food, and medical care. It is calculated by taking price changes for each item in the predetermined basket of goods and averaging them. Changes in the CPI are used to assess price changes associated with the cost of living.'}, 'missing_output_keys': []}, private_keys=['api_keys'])], 'error': None}]
+
+
+

Looks good! Now let’s publish it to the huggingface!

+
+
+

Publishing Your Flow Module

+

Start by creating a new model on Hugging Face, aligning it with the namespace used during testing: yeeef/UsefulChatBots. Then click the Create model button to create the model.

+

+

Then, you can either upload the files manually through the Hugging Face webpage or push your changes to the remote:

+
(aiflows)   dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots
+(aiflows)   dev_UsefulChatBots git:(main)  git remote add origin https://huggingface.co/yeeef/UsefulChatBots
+(aiflows)   dev_UsefulChatBots git:(main)  git pull -r origin main
+(aiflows)   dev_UsefulChatBots git:(main)  git push --set-upstream origin main
+
+
+

Congratulations! You now have your remote module online, available for everyone to use!

+

+
+
+
+

Contributing to an Existing Flow

+

In this tutorial, we continue to use the trivial_sync_demo.py (see Flow Module Management) script. As the dependencies are synced to your root directory, you can instantly modify the synced flow module according to your needs. Once you’ve made enough changes and feel ready to make a Pull Request (PR), you simply need to push your changes to the Hugging Face repository and create the PR.

+

For instance, let’s say we want to update the dependency of nbaldwin/ChatInteractiveFlowModule to the latest version of aiflows/ChatAtomicFlow:

+
dependencies = [
+    {"url": "aiflows/ChatFlowModule", "revision": "main"} # cae3fdf2f0ef7f28127cf4bc35ce985c5fc4d19a -> main
+]
+from aiflows import flow_verse
+flow_verse.sync_dependencies(dependencies)
+
+from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow
+
+class ChatHumanFlowModule(ChatAtomicFlow):
+    def __init__(self, **kwargs):
+        
+        ##SOME CODE
+
+
+

Firstly, navigate to the synced folder, initialize a git repository, and commit your changes:

+
(aiflows) ➜  dev-tutorial cd flow_modules/nbaldwin/ChatInteractiveFlowModule
+(aiflows) ➜  ChatInteractiveFlowModule git init
+Initialized empty Git repository in /Users/yeeef/Desktop/dlab-ra/dev-tutorial/flow_modules/nbaldwin/ChatInteractiveFlowModule/.git/
+(aiflows) ➜  ChatInteractiveFlowModule git:(main) ✗ git add .
+(aiflows) ➜  ChatInteractiveFlowModule git:(main) ✗ git commit -m "Change the dependency revision to main"
+[main d7465df] Change the dependency revision to main
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+
+

Next, you need to open a PR on the target Hugging Face repository. Navigate to Community and click on New pull request.

+

+

Enter a brief description for your PR branch and click on Create PR branch.

+

+

Once your PR branch has been created (for instance, pr/2), you’ll need to push your changes to this branch:

+
(aiflows) ➜  ChatInteractiveFlowModule git:(main) git checkout -b pr/2
+Switched to a new branch 'pr/2'
+(aiflows) ➜  ChatInteractiveFlowModule git:(pr/2) git remote add origin https://huggingface.co/nbaldwin/ChatInteractiveFlowModule
+(aiflows) ➜  ChatInteractiveFlowModule git:(pr/2) git pull -r origin pr/2
+(aiflows) ➜  ChatInteractiveFlowModule git:(pr/2) git push origin pr/2:pr/2
+Enumerating objects: 11, done.
+Counting objects: 100% (11/11), done.
+Delta compression using up to 10 threads
+Compressing objects: 100% (8/8), done.
+Writing objects: 100% (8/8), 952 bytes | 952.00 KiB/s, done.
+Total 8 (delta 5), reused 0 (delta 0), pack-reused
+
+ 0
+To https://huggingface.co/nbaldwin/ChatInteractiveFlowModule
+   1849a87..1818057  pr/2 -> refs/pr/2
+
+
+

Finally, review your PR changes on the Hugging Face PR page and click the Publish button to finalize your submission.

+

+
+
+

Develop Over an Existing Flow and Publish it Under Your Namespace

+

As a Flow developer, you can easily develop based on any synced flow modules. However, instead of making a PR to the original repository, you may wish to publish it under your own namespace. This can be the case if you’ve made substantial changes that the original author might not prefer.

+

Let’s get back to our trivial_sync_demo, where we leverage nbaldwin/ChatInteractiveFlowModule. We have made some changes to it and want to publish it on our own as yeeef/MyChatInteractiveFlowModule. To do this, we recommend the following steps:

+

Step 1: Manually copy the modified flow module out of the flow_modules directory:

+
(aiflows)   dev-tutorial cp -r ./flow_modules/nbaldwin/ChatInteractiveFlowModules PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules
+
+
+

Step 2: Next, we can treat it as a local file directory and sync it with a local revision:

+
dependencies = [
+    {"url": "nbaldwin/ChatInteractiveFlowModules", "revision": "main"},
+    {"url": "yeeef/MyChatInteractiveFlowModule", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules"},
+
+]
+from aiflows import flow_verse
+flow_verse.sync_dependencies(dependencies)
+
+from flow_modules.nbaldwin.ChatInteractiveFlowModules import ChatHumanFlowModule
+from flow_modules.yeeef.MyChatInteractiveFlowModules import MyChatInteractiveFlowModules
+
+if __name__ == "__main__":
+	print("it is a trivial sync demo")
+
+
+

Step 3: Finally, follow the procedure outlined in this section, and you are good to go!

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/getting_started/index.html b/docs/built_with_sphinx/html/getting_started/index.html new file mode 100644 index 0000000..14107ba --- /dev/null +++ b/docs/built_with_sphinx/html/getting_started/index.html @@ -0,0 +1,178 @@ + + + + + + + + + +Getting Started | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Getting Started

+
+

Quick start (🕓 5 min)

+

Here, you’ll see how you can run inference with your first question-answering Flow, and you can trivially change between vastly different question-answering Flows thanks to the modular abstraction and FlowVerse!

+
+
+

Tutorial (🕓 20 min)

+

In this tutorial, we introduce you to the library’s features through a walkthrough of how to build useful Flows of gradually increasing complexity. Starting from a vanilla QA Flow, we’ll first extend it to a ReAct Flow, then ReAct with human feedback, and finish the tutorial with a version of AutoGPT!

+
+
+

Developer’s Guide (🕓 10 min)

+

We are constantly optimizing our Flow development workflow (pun intended:). In this short guide, we share our best tips so that you don’t have to learn the hard way.

+
+
+

Detailed Examples

+

Many of the recently proposed prompting and collaboration strategies involving tools, humans, and AI models are, in essence, specific Flows (see the figure below). In the link above, you’ll find a detailed walkthrough of how to build some representative workflows.

+

The Flows framework exemplified. +The Flows framework exemplified. The first column depicts examples of tools. Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools, constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between Atomic or Composite Flows. The fourth column illustrates a specific Composite competitive coding Flow as those used in the experiments in the paper. The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior.

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/index.html b/docs/built_with_sphinx/html/index.html new file mode 100644 index 0000000..ac40ea0 --- /dev/null +++ b/docs/built_with_sphinx/html/index.html @@ -0,0 +1,204 @@ + + + + + + + + + +Introduction | aiFlows documentation + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+
+
+

Introduction

+ +
+image +
+

🤖🌊 aiFlows embodies the Flows (arXiv) abstraction and greatly simplifies the design and +implementation of complex (work)Flows involving humans, AI systems, and tools. It enables:

+
    +
  • 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface

  • +
  • 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows

  • +
  • 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future

  • +
+
+

Flows in a Nutshell

+

The framework is centered around Flows and messages. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process.

+
+image +
+

The Flows framework exemplified. The first column depicts examples of tools. Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between Atomic or Composite Flows. The fourth column illustrates a specific Composite competitive coding Flow as those used in the experiments in the paper. The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior.

+
+
+
+
+

FlowVerse in a Nutshell

+

The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file. As an example, here is the AutoGPT Flow on FlowVerse. For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++).

+

The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord channel. Additionally, the Tutorials and Detailed Examples in the Getting Started sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.).

+
+
+

Why should I use aiFlows?

+

AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools.

+
+

As a researcher, you will benefit from:

+
    +
  • The ability to design, implement, and study arbitrarily complex interactions

  • +
  • Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.).

  • +
  • The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the cc_flows repository could be a useful starting point in future studies).

  • +
  • The ability to readily make your work accessible to practitioners and other researchers and access their feedback.

  • +
+
+
+

As a practitioner, you will benefit from:

+
    +
  • The ability to design and implement arbitrarily complex interactions.

  • +
  • Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.).

  • +
  • The ability to readily reuse or build on top of Flows shared on the FlowVerse.

  • +
  • Direct access to any advancements in the field.

  • +
+

To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the contribute section for more information.

+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/installation/index.html b/docs/built_with_sphinx/html/installation/index.html new file mode 100644 index 0000000..85f6981 --- /dev/null +++ b/docs/built_with_sphinx/html/installation/index.html @@ -0,0 +1,175 @@ + + + + + + + + + +Installation | aiFlows documentation + + + + + + + + + + + + + +
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/introduction/index.html b/docs/built_with_sphinx/html/introduction/index.html new file mode 100644 index 0000000..d2575cd --- /dev/null +++ b/docs/built_with_sphinx/html/introduction/index.html @@ -0,0 +1,212 @@ + + + + + + + + + +Introduction | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

Introduction

+
+image +
+

🤖🌊 aiFlows embodies the Flows (arXiv) abstraction and greatly simplifies the design and implementation of complex (work)Flows involving humans, AI systems, and tools. It enables:

+
    +
  • 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface

  • +
  • 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows

  • +
  • 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future

  • +
+
+

Flows in a Nutshell

+

The framework is centered around Flows and messages. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process.

+
+image +
+

The Flows framework exemplified. The first column depicts examples of tools. Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between Atomic or Composite Flows. The fourth column illustrates a specific Composite competitive coding Flow as those used in the experiments in the paper. The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior.

+
+
+
+
+

FlowVerse in a Nutshell

+

The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file (e.g., here is the AutoGPT Flow on FlowVerse). For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++).

+

The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord channel. Additionally, the Tutorials and Detailed Examples in the Getting Started sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.).

+
+
+

Why should I use aiFlows?

+

AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools.

+
+

As a researcher, you will benefit from:

+
    +
  • The ability to design, implement, and study arbitrarily complex interactions

  • +
  • Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.).

  • +
  • The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the cc_flows repository could be a useful starting point in future studies).

  • +
  • The ability to readily make your work accessible to practitioners and other researchers and access their feedback.

  • +
+
+
+

As a practitioner, you will benefit from:

+
    +
  • The ability to design and implement arbitrarily complex interactions.

  • +
  • Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.).

  • +
  • The ability to readily reuse or build on top of Flows shared on the FlowVerse.

  • +
  • Direct access to any advancements in the field.

  • +
+

To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the contribute section for more information.

+
+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/objects.inv b/docs/built_with_sphinx/html/objects.inv new file mode 100644 index 0000000..4c2693f Binary files /dev/null and b/docs/built_with_sphinx/html/objects.inv differ diff --git a/docs/built_with_sphinx/html/py-modindex.html b/docs/built_with_sphinx/html/py-modindex.html new file mode 100644 index 0000000..5924ce6 --- /dev/null +++ b/docs/built_with_sphinx/html/py-modindex.html @@ -0,0 +1,397 @@ + + + + + + + + +Python Module Index | aiFlows documentation + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+

Python Module Index

+
+a +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 
+a
+aiflows +
    + aiflows.backends +
    + aiflows.backends.api_info +
    + aiflows.backends.llm_lite +
    + aiflows.base_flows +
    + aiflows.base_flows.abstract +
    + aiflows.base_flows.atomic +
    + aiflows.base_flows.branching +
    + aiflows.base_flows.circular +
    + aiflows.base_flows.composite +
    + aiflows.base_flows.sequential +
    + aiflows.data_transformations +
    + aiflows.data_transformations.abstract +
    + aiflows.data_transformations.end_of_interaction +
    + aiflows.data_transformations.json +
    + aiflows.data_transformations.key_copy +
    + aiflows.data_transformations.key_delete +
    + aiflows.data_transformations.key_match_input +
    + aiflows.data_transformations.key_rename +
    + aiflows.data_transformations.key_select +
    + aiflows.data_transformations.key_set +
    + aiflows.data_transformations.print_previous_messages +
    + aiflows.data_transformations.regex_extractor_first +
    + aiflows.data_transformations.unnesting_dict +
    + aiflows.datasets +
    + aiflows.datasets.abstract +
    + aiflows.datasets.demonstrations_11 +
    + aiflows.datasets.outputs +
    + aiflows.flow_cache +
    + aiflows.flow_cache.flow_cache +
    + aiflows.flow_launchers +
    + aiflows.flow_launchers.abstract +
    + aiflows.flow_launchers.flow_API_launcher +
    + aiflows.flow_verse +
    + aiflows.flow_verse.loading +
    + aiflows.flow_verse.utils +
    + aiflows.history +
    + aiflows.history.flow_history +
    + aiflows.interfaces +
    + aiflows.interfaces.abstract +
    + aiflows.interfaces.key_interface +
    + aiflows.messages +
    + aiflows.messages.abstract +
    + aiflows.messages.flow_message +
    + aiflows.prompt_template +
    + aiflows.prompt_template.jinja2_prompts +
    + aiflows.utils +
    + aiflows.utils.general_helpers +
    + aiflows.utils.io_utils +
    + aiflows.utils.logging +
    + aiflows.utils.rich_utils +
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/search.html b/docs/built_with_sphinx/html/search.html new file mode 100644 index 0000000..7a9837a --- /dev/null +++ b/docs/built_with_sphinx/html/search.html @@ -0,0 +1,152 @@ + + + + + + + + +Search | aiFlows documentation + + + + + + + + + + + +
+ + + + + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/searchindex.js b/docs/built_with_sphinx/html/searchindex.js new file mode 100644 index 0000000..205d5bb --- /dev/null +++ b/docs/built_with_sphinx/html/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["citation/index", "contributing_info/automating_documentation_flow", "contributing_info/coding_standards", "contributing_info/contribute_index", "contributing_info/contributing_to_FlowVerse", "contributing_info/contributing_to_aiFlows", "contributing_info/finding_collaborators", "contributing_info/index", "contributing_info/license_info", "contributing_info/recognition_info", "getting_started/Quick_Start/quick_start", "getting_started/Tutorial/atomic_flow", "getting_started/Tutorial/autogpt_tutorial", "getting_started/Tutorial/composite_flow", "getting_started/Tutorial/intro_to_FlowVerse_minimalQA", "getting_started/Tutorial/reAct", "getting_started/Tutorial/reActwHumanFeedback", "getting_started/Tutorial/setting_up_aiFlows", "getting_started/Tutorial/tutorial_landing_page", "getting_started/detailed_examples/autogpt", "getting_started/detailed_examples/chat_flow", "getting_started/detailed_examples/detailed_example_landing_page", "getting_started/detailed_examples/vision_flow", "getting_started/developer_guide/developper_guide_landing_page", "getting_started/developer_guide/flow_module_management", "getting_started/developer_guide/typical_developer_workflows", "getting_started/index", "index", "installation/index", "introduction/index", "source/aiflows", "source/aiflows.backends", "source/aiflows.base_flows", "source/aiflows.data_transformations", "source/aiflows.datasets", "source/aiflows.flow_cache", "source/aiflows.flow_launchers", "source/aiflows.flow_verse", "source/aiflows.history", "source/aiflows.interfaces", "source/aiflows.messages", "source/aiflows.prompt_template", "source/aiflows.utils", "source/modules"], "filenames": ["citation/index.md", 
"contributing_info/automating_documentation_flow.rst", "contributing_info/coding_standards.rst", "contributing_info/contribute_index.rst", "contributing_info/contributing_to_FlowVerse.rst", "contributing_info/contributing_to_aiFlows.rst", "contributing_info/finding_collaborators.rst", "contributing_info/index.rst", "contributing_info/license_info.rst", "contributing_info/recognition_info.rst", "getting_started/Quick_Start/quick_start.md", "getting_started/Tutorial/atomic_flow.md", "getting_started/Tutorial/autogpt_tutorial.md", "getting_started/Tutorial/composite_flow.md", "getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md", "getting_started/Tutorial/reAct.md", "getting_started/Tutorial/reActwHumanFeedback.md", "getting_started/Tutorial/setting_up_aiFlows.md", "getting_started/Tutorial/tutorial_landing_page.md", "getting_started/detailed_examples/autogpt.md", "getting_started/detailed_examples/chat_flow.md", "getting_started/detailed_examples/detailed_example_landing_page.md", "getting_started/detailed_examples/vision_flow.md", "getting_started/developer_guide/developper_guide_landing_page.md", "getting_started/developer_guide/flow_module_management.md", "getting_started/developer_guide/typical_developer_workflows.md", "getting_started/index.md", "index.rst", "installation/index.rst", "introduction/index.rst", "source/aiflows.rst", "source/aiflows.backends.rst", "source/aiflows.base_flows.rst", "source/aiflows.data_transformations.rst", "source/aiflows.datasets.rst", "source/aiflows.flow_cache.rst", "source/aiflows.flow_launchers.rst", "source/aiflows.flow_verse.rst", "source/aiflows.history.rst", "source/aiflows.interfaces.rst", "source/aiflows.messages.rst", "source/aiflows.prompt_template.rst", "source/aiflows.utils.rst", "source/modules.rst"], "titles": ["Citation", "Automating the documentation of a Flow on the FlowVerse", "Coding Standards", "Contribution Guide", "Recommended Workflow for Contributing to a Flow on the FlowVerse", "Contributing to 
aiFlows Library (for bug fixes and adding features)", "Looking for Collaborators ?", "Contribute", "Licence Info: Frequently Asked Questions", "Publicizing Your Work", "Quick Start", "Atomic Flow Tutorial", "AutoGPT Tutorial", "Composite Flow Tutorial", "Introducing the FlowVerse with a Simple Q&A Flow", "ReAct Tutorial", "ReAct With Human Feedback Tutorial", "Setting up aiFlows", "Tutorials", "AutoGPT", "ChatAtomicFlow", "Detailed Examples", "Vision Atomic Flow", "Developer\u2019s Guide", "Flow Module Management", "Typical Developer Workflows", "Getting Started", "Introduction", "Installation", "Introduction", "aiflows package", "aiflows.backends package", "aiflows.base_flows package", "aiflows.data_transformations package", "aiflows.datasets package", "aiflows.flow_cache package", "aiflows.flow_launchers package", "aiflows.flow_verse package", "aiflows.history package", "aiflows.interfaces package", "aiflows.messages package", "aiflows.prompt_template package", "aiflows.utils package", "aiflows"], "terms": {"To": [0, 9, 10, 12, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29], "refer": [0, 1, 4, 5, 12, 14, 15, 16, 19, 20, 22, 42], "aiflow": [0, 2, 7, 10, 11, 12, 13, 14, 15, 16, 19, 20, 21, 23, 24, 25, 28], "librari": [0, 2, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 26, 28, 31, 42], "now": [0, 10, 11, 12, 13, 14, 15, 16, 17, 25], "pleas": [0, 12, 16, 19, 42], "cite": 0, "paper": [0, 11, 12, 13, 16, 26, 27, 29], "flow": [0, 6, 7, 9, 17, 20, 21, 26, 30, 32, 33, 35, 36, 37, 38, 40, 42], "build": [0, 3, 7, 9, 18, 21, 23, 25, 26, 27, 29, 30, 32, 37, 40], "block": [0, 27, 29], "reason": [0, 12, 14, 15, 16, 19, 26, 27, 29], "collabor": [0, 3, 4, 5, 8, 9, 17, 18, 26], "ai": [0, 3, 7, 14, 17, 19, 26, 27, 29], "misc": 0, "josifoski2023flow": 0, "titl": [0, 5, 12, 16], "author": [0, 25], "martin": 0, "josifoski": 0, "lar": 0, "klein": 0, "maxim": [0, 12, 16, 27, 29], "peyrard": 0, "yifei": 0, "li": 0, "saibo": 0, "geng": 0, "julian": 0, "paul": 0, 
"schnitzler": 0, "yux": 0, "yao": 0, "jiheng": 0, "wei": 0, "debjit": 0, "robert": 0, "west": 0, "year": [0, 12, 16], "2023": [0, 12, 14, 15, 16, 25], "eprint": 0, "2308": 0, "01285": 0, "archiveprefix": 0, "arxiv": [0, 27, 29], "primaryclass": 0, "c": 0, "i": [1, 2, 3, 4, 5, 7, 19, 20, 22, 31, 32, 33, 35, 36, 37, 39, 40, 41, 42], "crucial": [1, 5, 13, 16], "step": [1, 4, 12, 13, 15, 16, 19, 25], "ensur": [1, 2, 3, 4, 5, 11, 13, 17, 19, 24, 27, 29], "clariti": [1, 24], "access": [1, 2, 3, 7, 8, 11, 12, 13, 14, 16, 17, 19, 27, 29, 42], "let": [1, 3, 4, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 25], "": [1, 2, 4, 7, 8, 19, 20, 22, 32, 37, 40, 42], "explor": [1, 3, 4, 9, 10, 12, 14, 16, 17, 18, 19, 20, 21, 22, 23, 27, 29], "an": [1, 2, 3, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 27, 31, 32, 36, 37, 40, 42], "effici": [1, 4, 5, 19], "wai": [1, 3, 7, 8, 26, 27, 29, 36], "thi": [1, 2, 3, 4, 5, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 32, 33, 35, 36, 37, 39, 40, 41, 42], "process": [1, 2, 3, 4, 5, 10, 12, 13, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 29, 42], "us": [1, 2, 3, 4, 5, 6, 7, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 31, 32, 33, 36, 37, 39, 41, 42], "start": [1, 3, 7, 11, 12, 14, 15, 16, 25, 27, 29, 36, 42], "need": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 15, 17, 20, 24, 25, 27, 29, 36, 42], "check": [1, 2, 3, 6, 10, 11, 13, 14, 15, 27, 29, 37, 42], "out": [1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 15, 16, 25, 27, 29], "chatflowmodul": [1, 4, 10, 24, 25], "inspir": 1, "pro": 1, "tip": [1, 3, 26], "leverag": [1, 13, 14, 25], "vscode": 1, "github": [1, 3, 5, 17, 28, 42], "copilot": 1, "expedit": 1, "you": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 42], "have": [1, 2, 3, 7, 19, 26, 27, 29], "necessari": [1, 2, 5, 27, 29, 32], "tool": [1, 11, 12, 14, 15, 16, 19, 20, 26, 27, 29], "run": [1, 11, 13, 14, 18, 19, 20, 22, 26, 28, 30, 32, 36, 42], 
"follow": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 32, 37, 42], "command": [1, 5, 10, 12, 15, 16, 17, 19, 28], "pip": [1, 12, 15, 16, 17, 28], "go": [1, 3, 7, 12, 16, 25], "contain": [1, 14, 15, 24, 25, 27, 29, 31, 32, 35, 36, 37, 42], "file": [1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 24, 25, 27, 29, 32, 36, 37, 42], "cd": [1, 10, 11, 12, 13, 14, 15, 16, 17, 25, 28], "path_to_your_flow": 1, "gener": [1, 6, 10, 11, 14, 16, 19, 20, 22, 27, 29, 41, 42], "replac": [1, 25, 42], "name": [1, 2, 5, 10, 11, 12, 13, 14, 15, 16, 17, 19, 25, 30, 31, 32, 37, 40, 42], "exclud": [1, 14], "py": [1, 10, 11, 12, 13, 14, 15, 16, 24, 25, 37], "extens": [1, 19, 20, 22], "For": [1, 2, 4, 6, 9, 12, 14, 15, 16, 17, 19, 20, 22, 24, 25, 27, 29, 42], "exampl": [1, 2, 3, 4, 7, 10, 11, 12, 13, 14, 15, 16, 18, 20, 22, 23, 24, 25, 27, 29, 42], "flow1": [1, 4, 13, 17], "execut": [1, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 21, 25, 32], "p": 1, "render": [1, 14, 16, 20, 41], "toc": 1, "readm": [1, 9, 14, 24], "md": [1, 12, 14, 15, 16, 24], "If": [1, 3, 4, 5, 6, 12, 15, 16, 17, 19, 20, 22, 32, 37, 42], "multipl": [1, 14, 15, 18, 22, 24, 31, 32, 36], "consid": [1, 5, 6, 9, 12, 15, 16, 19, 24], "includ": [1, 2, 5, 10, 11, 12, 13, 14, 15, 16, 19, 22, 24, 42], "all": [1, 2, 4, 8, 10, 11, 12, 13, 14, 16, 17, 19, 32, 33, 37, 39, 42], "streamlin": 1, "contribut": [1, 2, 6, 12, 15, 16, 17, 18, 23, 27, 29], "happi": [1, 2, 3, 4, 8, 9, 17], "when": [2, 5, 11, 12, 13, 14, 16, 17, 19, 20, 22, 25, 31, 32, 41, 42], "essenti": [2, 3, 14, 15, 16, 17, 18, 25], "adher": 2, "maintain": [2, 4, 12, 16], "consist": [2, 4, 24, 25, 27, 29, 32], "overal": [2, 12, 16], "qualiti": [2, 3], "codebas": [2, 3, 7, 19], "strive": [2, 4, 27, 29], "make": [2, 3, 7, 8, 10, 15, 16, 17, 18, 19, 21, 23, 25, 27, 29, 31, 36], "your": [2, 6, 7, 8, 12, 14, 15, 16, 18, 19, 21, 23, 24, 26, 27, 29], "simpl": [2, 12, 13, 15, 16, 19, 27, 29], "possibl": [2, 3, 7, 14], "clear": [2, 5, 15, 23, 24, 25, 35], 
"meaning": [2, 4, 27, 29], "variabl": [2, 14, 16, 17, 20, 41], "function": [2, 10, 11, 12, 14, 16, 18, 19, 20, 21, 22, 31, 32, 34, 37, 42], "avoid": 2, "unnecessari": 2, "complex": [2, 11, 13, 24, 26, 27, 29, 32], "industri": 2, "implement": [2, 4, 5, 10, 11, 12, 15, 16, 19, 20, 24, 27, 29, 32, 34, 36], "fix": [2, 8], "bug": [2, 8], "languag": [2, 10, 12, 14, 15, 16, 18, 19, 20, 25], "specif": [2, 10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 26, 27, 29, 37], "convent": [2, 14], "guidelin": [2, 3, 8], "thoroughli": 2, "provid": [2, 3, 5, 6, 8, 10, 12, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 29, 36, 42], "comment": [2, 5, 12, 14, 15, 16], "where": [2, 10, 12, 14, 15, 16, 18, 19, 21, 23, 25], "explain": [2, 12, 14, 15, 16, 20, 25], "logic": [2, 11, 24], "algorithm": [2, 12, 16], "concis": [2, 5], "describ": [2, 3, 5, 7, 10, 13, 16, 19, 32], "thought": [2, 19], "new": [2, 4, 8, 12, 13, 14, 16, 25, 33, 39, 41, 42], "class": [2, 11, 13, 14, 16, 22, 24, 25, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42], "These": [2, 12, 13, 14, 15, 16, 18, 19, 21, 23], "should": [2, 3, 4, 7, 9, 10, 11, 12, 13, 16, 17, 19, 24, 25, 32, 40, 41], "purpos": [2, 16, 21], "paramet": [2, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 22, 25, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42], "return": [2, 11, 12, 15, 16, 19, 20, 22, 31, 32, 33, 34, 35, 36, 37, 38, 40, 41, 42], "valu": [2, 13, 14, 19, 32, 33, 35, 39, 41, 42], "possibli": [2, 36], "except": [2, 42], "rais": [2, 19, 37, 42], "here": [2, 3, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 22, 26, 27, 29, 42], "def": [2, 11, 16, 19, 20, 22, 25], "example_funct": 2, "param1": 2, "param2": 2, "brief": [2, 25], "descript": [2, 5, 10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 25, 32], "param": [2, 19, 20, 22, 34, 42], "first": [2, 12, 14, 15, 16, 18, 20, 24, 26, 27, 29, 33], "second": [2, 12, 13, 16, 26, 27, 29, 36], "customexcept": 2, "result": [2, 10, 12, 15, 16, 22, 33, 35, 36], "more": [2, 3, 5, 6, 7, 10, 12, 13, 15, 16, 19, 20, 22, 24, 27, 29, 
32, 36], "detail": [2, 3, 4, 5, 7, 12, 13, 14, 15, 16, 19, 20, 22, 27, 29, 32, 36], "link": [2, 12, 16, 25, 26, 37], "chang": [2, 25, 26], "ar": [2, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 23, 24, 25, 26, 27, 29, 32, 33, 40, 41, 42], "whenev": 2, "help": [2, 5, 7, 14, 25, 42], "stabil": 2, "exist": [2, 23, 24, 36, 37, 42], "user": [2, 12, 14, 15, 16, 18, 19, 20, 22, 25, 37, 40, 42], "creat": [2, 11, 12, 13, 14, 15, 16, 17, 18, 23, 24, 27, 29, 35, 36, 37, 40, 42], "comprehens": [2, 4, 12, 16, 18, 19, 21, 23], "cover": [2, 27, 29], "variou": [2, 12, 14, 15, 16, 20, 22], "scenario": [2, 14, 15, 16, 20], "edg": 2, "case": [2, 10, 11, 14, 20, 22, 25], "robust": [2, 4], "try": [2, 25], "increas": [2, 26], "ad": [2, 3, 14, 16, 19, 20, 36, 37], "modifi": [2, 8, 12, 16, 25, 37], "ones": [2, 10, 24, 27, 29], "aim": [2, 4, 19], "high": [2, 3], "percentag": 2, "catch": 2, "potenti": [2, 3, 19, 20, 22, 24, 25], "issu": [2, 3, 12, 16], "earli": [2, 19], "introduc": [2, 4, 12, 13, 15, 16, 21, 23, 26], "correspond": [2, 11, 13, 14, 15, 19, 24, 26, 27, 29], "everi": [2, 19, 20, 42], "updat": [2, 5, 14, 19, 20, 22, 25, 40, 42], "dedic": [2, 4, 5, 12, 15, 16, 17, 19], "greatli": [2, 3, 7, 27, 29], "appreci": [2, 3, 5, 7], "friendli": [2, 27, 29], "entir": [2, 9, 12, 16, 19], "commun": [2, 3, 7, 8, 10, 12, 14, 16, 24, 25, 27, 29], "onc": [2, 12, 16, 25, 42], "again": [2, 12, 13, 16], "thank": [2, 5, 10, 13, 17, 25, 26], "being": [2, 12, 14, 16, 19], "member": [2, 5, 10, 12, 16], "our": [2, 3, 4, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 19, 24, 25, 26, 27, 29, 34], "commit": [2, 4, 14, 25, 37], "even": [2, 8], "better": [2, 8], "whether": [3, 4, 8, 13, 16, 20, 31, 32, 33, 35, 36, 37, 42], "re": [3, 4, 5, 6, 10, 12, 14, 15, 17, 21, 25], "interest": [3, 5, 12, 16, 21], "document": [3, 5, 17, 20, 21, 22], "ll": [3, 10, 12, 14, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26], "find": [3, 7, 9, 10, 12, 14, 15, 16, 19, 20, 22, 24, 26, 42], "resourc": [3, 19, 21], "know": [3, 4], "what": 
[3, 7, 8, 10, 20, 24, 25, 42], "look": [3, 4, 12, 14, 15, 16, 17, 19, 20, 25, 42], "jump": 3, "right": [3, 17], "section": [3, 9, 19, 20, 22, 25, 27, 29], "below": [3, 10, 11, 12, 13, 14, 15, 16, 19, 26, 41], "recommend": [3, 14, 16, 17, 18, 21, 23, 25, 42], "workflow": [3, 7, 17, 19, 24, 26, 27, 29], "info": [3, 12, 16, 19, 42], "frequent": 3, "ask": [3, 5, 16, 19, 20, 42], "question": [3, 10, 12, 14, 15, 16, 20, 25, 26], "work": [3, 4, 5, 7, 12, 13, 16, 17, 27, 29], "goal": [3, 7, 12, 13, 15, 16, 19, 27, 29, 32], "driven": [3, 7, 12, 16, 27, 29], "benefit": [3, 7, 9], "research": [3, 6, 7, 8, 12, 13, 16], "develop": [3, 5, 6, 7, 8, 12, 13, 14, 15, 16, 18, 24, 27, 29], "alik": [3, 7], "see": [3, 7, 8, 10, 12, 14, 15, 16, 20, 25, 26, 27, 29, 32, 36, 41], "why": [3, 7, 14], "achiev": [3, 7, 11, 12, 14, 16, 20], "we": [3, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 29, 42], "can": [3, 5, 7, 10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 24, 25, 26, 27, 29, 31, 32, 33, 36, 40, 41, 42], "becom": [3, 7, 12, 14, 16, 19], "part": [3, 7, 9, 12, 13, 16, 19, 27, 29], "few": [3, 7, 11], "directli": [3, 7, 42], "improv": [3, 7, 8], "everyon": [3, 7, 14, 24, 25, 27, 29], "other": [3, 5, 7, 11, 12, 13, 14, 15, 16, 23, 24, 25, 27, 29, 41, 42], "might": [3, 6, 7, 10, 15, 25], "creativ": [3, 4, 7], "push": [3, 7, 25], "its": [3, 7, 10, 11, 12, 14, 15, 16, 18, 19, 20, 21, 22, 32, 42], "limit": [3, 7, 12, 16, 18], "feedback": [3, 4, 7, 12, 15, 26, 27, 29, 42], "proof": [3, 7], "pud": [3, 7], "eat": [3, 7], "best": [3, 5, 7, 12, 16, 19, 25, 26], "identifi": [3, 7, 21, 24], "promis": [3, 7], "direct": [3, 6, 7, 27, 29], "well": [3, 7, 12, 16, 32], "import": [3, 7, 10, 12, 13, 14, 15, 16, 24, 25], "miss": [3, 7], "experi": [3, 4, 7, 10, 11, 18, 21, 23, 26, 27, 29], "last": [3, 7, 12, 16, 19, 33, 38], "least": [3, 7, 19], "star": [3, 12, 16], "repositori": [3, 7, 10, 24, 25, 27, 29, 37], "shout": 3, "friend": [3, 6, 7], "colleagu": [3, 7], "spread": [3, 7], 
"word": [3, 7], "love": [3, 7], "support": [3, 6, 7, 10, 12, 14, 16, 19, 20, 22, 26, 27, 29], "also": [3, 4, 5, 6, 7, 9, 11, 12, 13, 16, 19, 20, 21, 22, 25], "lead": [3, 6, 7, 12, 16, 19], "In": [3, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 22, 24, 25, 26], "come": [3, 7, 10, 14], "week": [3, 7], "roadmap": [3, 7], "flowviz": [3, 7], "flowstudio": [3, 7], "flexibl": [3, 7], "concurr": [3, 7, 27, 29], "etc": [3, 7, 27, 29, 42], "would": [3, 7, 10, 11, 14, 16], "write": [3, 7, 12, 17, 19, 25, 36, 37, 42], "up": [3, 7, 12, 14, 15, 16, 25, 32], "outlin": [3, 7, 11, 13, 14, 25, 26, 27, 29], "idea": [3, 4, 5, 7], "long": [3, 7, 12, 16, 18, 19], "term": [3, 7, 12, 15, 16, 18, 19], "vision": [3, 7], "encourag": [3, 7, 8, 9, 14], "pick": [3, 7, 12, 16], "ani": [3, 7, 10, 11, 13, 16, 19, 20, 22, 24, 25, 27, 29, 31, 32, 35, 36, 39, 40, 41, 42], "them": [3, 7, 10, 12, 15, 16, 17, 24, 25, 27, 29, 31], "whatev": [3, 7], "fit": [3, 7, 8], "version": [3, 7, 24, 25, 26, 31], "jarvi": [3, 7], "fulli": [3, 7, 11], "customiz": [3, 7, 27, 29], "chatgpt": [3, 7, 24, 27, 29], "which": [3, 4, 7, 10, 12, 13, 14, 15, 16, 17, 19, 20, 22, 24, 25], "continu": [3, 7, 19, 25, 27, 29], "hope": [3, 7], "excit": [3, 7, 10, 18, 21, 23], "much": [3, 7], "u": [3, 7, 25, 42], "one": [3, 4, 5, 7, 12, 13, 14, 16, 17, 18, 20, 21, 22, 23, 24, 31, 32, 33, 40, 41, 42], "those": [3, 4, 7, 20, 22, 26, 27, 29], "constantli": [3, 7, 19, 26], "boundari": [3, 7], "tri": [3, 7], "anyon": [3, 7], "envis": [3, 7], "hear": [3, 7], "discord": [3, 6, 7, 9, 27, 29], "server": [3, 7], "alreadi": [3, 4, 5, 7, 12, 13, 16, 17, 31, 37], "ha": [3, 7, 10, 12, 15, 16, 17, 19, 20, 25, 32, 33, 41, 42], "channel": [3, 5, 6, 7, 9, 27, 29], "nutshel": [3, 7], "just": [3, 6, 7, 10, 20, 25], "begin": [3, 6, 7, 12, 14, 15, 16, 17], "stai": [3, 7], "tune": [3, 7], "great": [3, 6, 7], "futur": [3, 7, 14, 27, 29], "togeth": [3, 7], "guidanc": 3, "dream": [3, 6, 12, 16], "team": [3, 6, 12, 16], "report": [3, 12, 16], "discuss": [3, 4], 
"pull": [3, 14, 15, 18, 23, 24, 25], "request": [3, 25, 36], "matter": [3, 4, 9], "from": [3, 4, 5, 12, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 26, 31, 32, 33, 34, 36, 37, 40, 41, 42], "understand": [3, 8, 13, 14, 15, 16, 18, 19, 20, 21, 22, 23, 24, 25], "publish": [3, 23, 24], "speed": 3, "review": [3, 11, 13, 19, 25], "familiar": [3, 10, 12, 14, 15, 16, 18], "yourself": 3, "befor": [3, 4, 5, 11, 12, 13, 14, 16, 33, 36, 41], "submit": 3, "learn": [3, 10, 11, 12, 14, 16, 18, 21, 23, 25, 26], "about": [3, 4, 12, 16, 25, 31, 37], "recognit": [3, 9], "acknowledg": [3, 9, 12, 18], "quick": [3, 42], "answer": [3, 10, 11, 12, 13, 14, 15, 16, 20, 22, 25, 26], "mit": [3, 8, 12, 16], "licens": 3, "charg": 3, "commerci": 3, "thrive": [3, 8], "without": [3, 8, 10, 12, 14, 15, 16, 19, 24, 25], "worri": 3, "legal": [3, 19], "stuff": 3, "initi": [4, 8, 12, 14, 16, 17, 18, 20, 22, 25], "take": [4, 11, 13, 19, 20, 25, 32, 36], "moment": 4, "similar": [4, 12, 14, 15, 16, 19, 21], "delv": [4, 12, 15, 16, 19, 20, 22], "vibrant": 4, "share": [4, 5, 6, 7, 14, 21, 22, 24, 26, 27, 29], "reach": 4, "vers": [4, 6], "desir": [4, 14, 16, 19], "doesn": [4, 5, 10], "t": [4, 5, 10, 12, 14, 16, 17, 19, 26, 42], "craft": 4, "post": [4, 6], "inform": [4, 10, 11, 12, 14, 15, 16, 19, 25, 27, 29, 31, 36, 37, 42], "serv": [4, 14, 25], "excel": 4, "platform": [4, 24], "seek": [4, 5, 6, 19], "receiv": [4, 12, 16, 40], "assist": [4, 5, 6, 14, 18, 19, 40], "showcas": [4, 5], "util": [4, 5, 9, 10, 12, 13, 14, 15, 16, 18, 19, 30, 43], "space": [4, 6], "onli": [4, 5, 9, 11, 12, 14, 15, 16, 19, 24, 25, 27, 29, 32, 33, 36, 42], "foster": [4, 5], "gather": [4, 16, 19], "insight": [4, 5, 6, 11, 12, 13, 15, 16, 18, 21], "promot": 4, "within": [4, 9, 10, 11, 12, 13, 16, 17, 18, 22, 24], "cultiv": 4, "guid": [4, 7, 10, 11, 12, 13, 14, 15, 16, 24, 27, 29], "navig": [4, 25], "intric": [4, 12, 18], "pathwai": 4, "typical_developper_workflow": 4, "tutori": [4, 5, 21, 27, 29, 42], "how": [4, 7, 10, 11, 12, 14, 15, 
16, 17, 18, 21, 23, 25, 26, 32], "optim": [4, 26], "establish": [4, 13, 17, 18, 27, 29], "central": [4, 17], "folder": [4, 14, 25, 42], "hous": 4, "plan": [4, 19], "interact": [4, 5, 8, 12, 13, 14, 16, 18, 26, 27, 29, 32, 33], "structur": [4, 8, 12, 13, 18, 21, 23, 24, 26, 27, 29, 42], "like": [4, 5, 12, 14, 15, 16, 17, 19, 20, 24, 27, 29], "someth": [4, 12, 16, 25], "flow_modul": [4, 10, 12, 14, 15, 16, 17, 19, 24, 25], "flow2": [4, 13, 17], "adopt": 4, "solid": 4, "foundat": [4, 12, 16], "project": [4, 5, 7, 9], "facilit": [4, 17], "smoother": 4, "embrac": 4, "standard": [4, 5, 11, 13, 19, 27, 29], "backbon": 4, "align": [4, 5, 14, 15, 16, 25], "readabl": [4, 36], "broader": 4, "inclin": 4, "toward": [4, 12, 15, 16, 27, 29], "mai": [4, 5, 6, 16, 19, 24, 25, 42], "prove": [4, 12, 20], "benefici": [4, 27, 29], "strateg": 4, "move": [4, 12, 15, 16, 19, 41], "systemat": [4, 27, 29], "manag": [4, 14, 16, 17, 18, 19, 25], "progress": [4, 5, 15, 16, 27, 29], "advic": [4, 5], "activ": [4, 5, 8, 17, 18], "particip": [4, 8], "convers": [4, 12, 16, 19, 20, 22], "etho": 4, "rememb": [4, 5, 8, 9], "each": [4, 10, 12, 13, 15, 16, 18, 19, 21, 23, 24, 25, 27, 29, 31, 36, 42], "small": 4, "add": [4, 5, 16, 19, 20, 22, 25, 37, 38, 42], "tapestri": 4, "problem": [5, 12, 15, 16, 19], "ve": [5, 10, 12, 13, 14, 15, 16, 25], "encount": 5, "doe": [5, 15, 20, 25, 42], "addit": [5, 14, 16, 20, 22, 31, 36, 39], "express": [5, 12, 16, 19], "addition": [5, 12, 14, 15, 16, 19, 27, 29], "immedi": 5, "join": [5, 12, 13, 14, 15, 16], "debug": [5, 36, 42], "connect": [5, 13, 18], "coordin": [5, 13], "effort": [5, 9], "engag": [5, 6, 8, 9, 18, 21, 23], "valuabl": [5, 6, 9, 12, 16], "throughout": 5, "resolut": 5, "reproduc": [5, 27, 29], "templat": [5, 12, 14, 16, 19, 20, 22, 25, 41], "suggest": [5, 17, 42], "solut": [5, 12], "forum": [5, 6, 9, 27, 29], "open": [5, 7, 12, 16, 17, 24, 25, 27, 29, 37], "prevent": [5, 16, 42], "duplic": 5, "awar": 5, "ongo": 5, "environ": [5, 17, 24], "On": [5, 9], 
"page": [5, 19, 24, 25], "click": [5, 6, 25], "copi": [5, 10, 25, 33], "under": [5, 8, 12, 16, 24], "account": [5, 17, 18], "local": [5, 10, 13, 22, 24, 25, 37], "machin": [5, 12, 16], "git": [5, 17, 25, 28], "http": [5, 14, 17, 25], "com": [5, 17, 28], "usernam": [5, 14, 25, 37], "checkout": [5, 25], "b": [5, 12, 16, 25], "messag": [5, 11, 12, 13, 16, 19, 20, 22, 27, 29, 30, 32, 33, 35, 36, 38, 42, 43], "m": [5, 12, 14, 16, 25], "origin": [5, 16, 25], "select": [5, 12, 14, 15, 16, 19, 24, 33, 36, 39], "appropri": [5, 6], "base": [5, 6, 10, 11, 19, 25, 27, 29, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41], "compar": [5, 14, 20, 22, 37], "dropdown": 5, "sure": [5, 10, 17, 19], "keyword": [5, 19, 32, 36, 41, 42], "number": [5, 11, 12, 13, 15, 16, 19, 24, 25, 31, 32, 34, 35, 36, 38, 42], "e": [5, 11, 13, 14, 17, 19, 20, 27, 28, 29, 42], "g": [5, 11, 13, 14, 19, 20, 27, 29], "123": 5, "automat": [5, 9, 25, 31], "close": 5, "relat": [5, 12, 15, 16, 19], "merg": [5, 31, 32], "Be": 5, "adjust": 5, "unsur": 5, "clarif": 5, "immens": 5, "contributor": 5, "signific": [5, 12, 14, 15, 16, 18], "impact": 5, "want": [5, 10, 14, 15, 25, 42], "gratitud": 5, "proudli": [5, 8], "appear": [5, 10, 15, 17], "wall": 5, "lookout": 6, "tackl": [6, 12, 16], "featur": [6, 7, 8, 26, 27, 29], "head": [6, 8], "over": [6, 8, 27, 29], "highlight": 6, "skill": [6, 13, 14, 15, 18, 21, 23], "specifi": [6, 11, 12, 13, 14, 15, 16, 19, 20, 25, 37, 42], "area": 6, "could": [6, 7, 12, 16, 19, 24, 25, 26, 27, 29], "target": [6, 25], "search": [6, 11, 12, 14, 15, 16, 19, 33, 42], "special": [6, 13], "awai": 6, "resolv": [6, 42], "visit": 6, "focu": [6, 12, 16], "flowvers": [6, 7, 9, 12, 13, 15, 16, 19, 20, 21, 22, 23, 24, 26], "involv": [6, 7, 16, 19, 26, 27, 29], "queri": [6, 12, 14, 15, 16, 19, 20, 22, 25, 36], "session": 6, "fruit": 6, "input": [6, 10, 11, 13, 14, 16, 18, 19, 20, 21, 22, 25, 32, 33, 35, 36, 40, 41], "concept": [6, 11, 13, 18, 21, 23], "due": [6, 22], "constraint": [6, 19], "perfect": 6, 
"who": [6, 14, 15, 19], "passion": 6, "mission": [7, 27, 29], "sourc": [7, 14, 24, 25, 27, 29], "public": [7, 12, 16], "The": [7, 8, 9, 10, 11, 13, 14, 20, 22, 24, 25, 26, 27, 28, 29, 31, 32, 33, 35, 36, 37, 38, 40, 41, 42], "contr": 7, "get": [7, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 23, 25, 27, 29, 30, 31, 35, 42], "absolut": [8, 9], "oper": [8, 19], "permiss": [8, 12], "grant": 8, "freedom": 8, "distribut": [8, 12, 16], "It": [8, 10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 24, 25, 27, 29, 31, 32, 33, 36, 39, 42], "Not": [8, 32, 36], "free": [8, 12, 16], "hidden": [8, 27, 29], "fee": 8, "design": [8, 12, 16, 19, 27, 29], "creation": [8, 13, 14, 16], "seamless": [8, 13, 17, 18, 25], "ye": 8, "allow": [8, 16, 27, 29, 31], "both": [8, 12, 16, 17, 24, 25, 36], "feel": [8, 25], "incorpor": [8, 13, 14, 15, 16, 18], "endeavor": 8, "thei": [8, 12, 14, 16, 27, 29, 32, 42], "applic": [8, 12, 16], "nope": 8, "consider": 8, "accord": [8, 25], "integr": [8, 12, 16, 17, 18, 19, 24], "condit": 8, "highli": [8, 9, 25], "welcom": [8, 10, 17, 18, 21, 23], "empow": [8, 16, 27, 29], "flourish": 8, "code": [8, 10, 12, 14, 15, 16, 20, 22, 25, 26, 27, 29, 42], "earn": [9, 12, 16], "spot": 9, "recogn": [9, 16, 18, 21], "celebr": 9, "grow": [9, 27, 29], "some": [9, 10, 12, 15, 16, 20, 22, 25, 26, 27, 29, 41], "opportun": 9, "proper": [9, 13], "kei": [9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 31, 32, 33, 35, 36, 39, 40, 42], "alwai": [9, 11, 13, 19, 22, 25], "honor": [9, 12, 16], "cultur": [9, 12, 16], "respect": [9, 12, 15], "enrich": 9, "world": [10, 12, 16, 18, 21, 23], "through": [10, 12, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26], "infer": [10, 11, 12, 13, 14, 15, 16, 26, 36], "differ": [10, 12, 14, 16, 18, 20, 26, 27, 29, 32, 42], "task": [10, 19], "aspect": 10, "modular": [10, 25, 26, 27, 29], "abstract": [10, 11, 25, 26, 27, 29, 30, 43], "trivial": [10, 24, 25, 26], "switch": [10, 24, 25, 31, 42], "between": [10, 11, 12, 13, 14, 16, 18, 21, 24, 26, 27, 29, 31, 32, 40], "veri": 10, 
"pre": 10, "organ": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], "two": [10, 11, 12, 13, 14, 15, 16, 19, 20, 21, 31], "my": [10, 14, 15, 16, 17, 18, 25], "understood": [10, 21], "pass": [10, 11, 12, 13, 14, 15, 16, 19, 22, 31, 32, 40, 42], "api": [10, 11, 12, 14, 15, 16, 18, 19, 20, 22, 25, 27, 29, 31, 36], "while": [10, 12, 13, 16, 20, 27, 29], "mani": [10, 26], "custom": [10, 11, 16, 18, 42], "sake": 10, "simplic": 10, "openai": [10, 12, 14, 15, 16, 17, 19, 25], "azur": [10, 12, 14, 15, 16, 17, 19], "divers": 10, "arrai": 10, "demonstr": [10, 12, 14, 16, 17, 20, 22], "illustr": [10, 13, 24, 26, 27, 29], "focus": 10, "chatatomicflow": [10, 11, 12, 14, 15, 16, 18, 19, 22, 24, 25, 27, 29], "versatil": [10, 18, 21, 23], "model": [10, 12, 14, 15, 16, 18, 19, 20, 25, 26, 27, 29, 31, 36, 37, 42], "llm": [10, 11, 12, 14, 15, 16, 18, 19, 20, 22], "via": [10, 14, 19, 20, 22, 27, 29], "textual": [10, 14, 20, 22], "respons": [10, 11, 12, 14, 16, 19, 20, 22, 25, 31, 32, 40], "given": [10, 12, 13, 15, 16, 18, 19, 21, 22, 23, 25, 32, 33, 34, 35, 36, 37, 41, 42], "worth": [10, 14], "note": [10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 25, 32, 42], "same": [10, 14, 16, 22, 31, 36, 42], "appli": [10, 11, 32, 33, 39], "avail": [10, 12, 14, 15, 16, 17, 19, 24, 25, 27, 29, 42], "further": [10, 12, 14, 15, 16, 19], "ado": 10, "dive": [10, 14, 17, 18, 21, 23], "concret": [10, 25], "sync_depend": [10, 12, 14, 15, 16, 24, 25, 30, 37], "definit": 10, "flow_vers": [10, 12, 14, 15, 16, 24, 25, 30, 43], "url": [10, 12, 14, 15, 16, 22, 24, 25, 31], "revis": [10, 12, 14, 15, 16, 24, 25, 30, 37], "main": [10, 12, 13, 14, 15, 16, 18, 21, 23, 24, 25, 42], "pip_requir": [10, 12, 15, 16], "txt": [10, 12, 15, 16], "empti": [10, 20, 25, 37], "instal": [10, 12, 15, 16, 18], "As": [10, 11, 12, 13, 14, 16, 20, 25], "so": [10, 12, 14, 15, 16, 19, 20, 25, 26, 42], "set": [10, 12, 14, 15, 16, 19, 22, 25, 27, 29, 30, 31, 32, 33, 35, 42], "after": [10, 11, 13], "been": [10, 12, 15, 16, 17, 20, 25, 42], "past": 
[10, 12, 15, 16, 17, 19], "backend": [10, 12, 14, 15, 16, 19, 20, 22, 25, 30, 43], "api_kei": [10, 12, 14, 15, 16, 17, 19, 25, 30, 31], "api_inform": [10, 12, 14, 15, 16, 19, 25], "apiinfo": [10, 12, 14, 15, 16, 25, 30, 31], "backend_us": [10, 12, 14, 15, 16, 19, 25, 30, 31], "api_bas": [10, 12, 14, 15, 16, 30, 31], "api_vers": [10, 12, 14, 15, 16, 30, 31], "copypas": 10, "demo": [10, 24, 25], "yaml": [10, 11, 12, 13, 14, 15, 16, 24, 25, 42], "configur": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 24, 32, 42], "instanti": [10, 11, 12, 13, 14, 15, 16, 19, 32, 36, 39], "load": [10, 12, 14, 15, 16, 19, 30, 42, 43], "general_help": [10, 30, 43], "read_yaml_fil": [10, 11, 12, 13, 14, 15, 16, 30, 42], "cfg": [10, 12, 13, 14, 15, 16, 42], "attent": 10, "reader": [10, 25], "notic": [10, 15, 16, 19], "field": [10, 12, 13, 16, 27, 29, 42], "api_info": [10, 12, 14, 15, 16, 25, 30, 43], "snippet": [10, 12, 15, 16, 20, 22], "overrid": [10, 11, 12, 13, 14, 15, 16, 19, 25, 32], "config": [10, 12, 14, 15, 16, 17, 25, 27, 29, 32, 42], "_target_": [10, 11, 12, 13, 14, 15, 16, 19, 20, 25], "instantiate_from_default_config": [10, 11, 12, 13, 14, 15, 16, 19, 25, 30, 32], "simpleqa_flow": [10, 14], "A": [10, 11, 12, 13, 15, 16, 24, 25, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42], "interfac": [10, 11, 13, 14, 16, 18, 19, 20, 22, 25, 27, 29, 30, 32, 36, 43], "input_interface_non_initi": [10, 14, 20], "llm_lite": [10, 14, 15, 30, 43], "litellmbackend": [10, 14, 15, 30, 31], "overwrit": [10, 37], "person": [10, 15, 18], "put": [10, 12, 14, 15, 16], "flow_with_interfac": [10, 11, 12, 13, 14, 15, 16, 25, 36], "input_interfac": [10, 11, 12, 13, 14, 15, 16, 25, 32, 36], "none": [10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 25, 31, 32, 33, 34, 35, 36, 37, 38, 40, 42], "output_interfac": [10, 11, 12, 13, 14, 15, 16, 25, 32, 36], "control": [10, 12, 15, 16, 19, 27, 29], "data": [10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 25, 30, 32, 33, 34, 35, 36, 39, 40, 42], "don": [10, 14, 26], "manipul": 10, 
"leav": 10, "flowlaunch": [10, 11, 12, 13, 14, 15, 16, 18, 25, 30, 32, 36], "id": [10, 11, 12, 13, 14, 15, 16, 19, 24, 25, 37, 42], "0": [10, 11, 12, 13, 14, 15, 16, 20, 22, 25, 33], "capit": 10, "franc": 10, "_": [10, 11, 12, 13, 14, 15, 16, 19], "output": [10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 22, 25, 30, 32, 33, 35, 36, 40, 42, 43], "launch": [10, 11, 12, 13, 14, 15, 16, 25, 30, 36], "print": [10, 11, 12, 13, 14, 15, 16, 24, 25, 33, 36, 42], "flow_output_data": [10, 11, 12, 13, 14, 15, 16], "congratul": [10, 12, 17, 25], "successfulli": [10, 12, 14, 15, 16, 17, 18], "runchatatomicflow": 10, "termin": [10, 12, 16, 17, 19], "quick_start": 10, "python": [10, 11, 12, 13, 14, 15, 16, 17, 19, 24, 25, 28, 33, 42], "upon": [10, 11, 12, 13, 14, 15, 16, 25], "api_output": [10, 14, 19, 20, 22], "pari": 10, "quickli": [10, 12, 16], "jupyt": 10, "flow_verse_playground": 10, "ipynb": 10, "quicki": 10, "react": [10, 12, 13, 14, 20, 21, 26, 27, 29], "chatinteractiveflowmodul": [10, 24, 25], "chatwithdemonstrationsflowmodul": 10, "autogptflowmodul": [10, 12], "visionflowmodul": 10, "present": [11, 13, 16], "atomicflow": [11, 18, 25, 30, 32], "gain": [11, 12, 13, 14, 15, 16, 18, 21, 23, 24], "relationship": [11, 18], "among": [11, 18], "acquir": [11, 13, 14, 15, 18, 21], "hand": [11, 18, 21, 23], "reversenumberatom": [11, 18], "subclass": [11, 13], "around": [11, 12, 16, 19, 26, 27, 29, 31], "framework": [11, 26, 27, 29, 32], "effect": [11, 12, 13, 26, 27, 29], "minim": [11, 13, 14, 25, 26, 27, 29, 32], "wrapper": [11, 19, 26, 27, 29, 31], "thing": [11, 14, 25], "most": [11, 12, 14, 16, 18, 19, 21, 23], "basic": [11, 12, 32], "gpt": [11, 14, 15, 24, 27, 29], "4": [11, 12, 14, 15, 16, 19], "prompt": [11, 12, 14, 15, 16, 17, 19, 20, 22, 26, 41], "intern": [11, 12, 16], "comput": [11, 12, 15, 16, 27, 29], "expos": 11, "exchang": [11, 27, 29], "engin": [11, 12, 14, 15, 16], "lctoolflowmodul": [11, 12, 15, 16, 19], "human": [11, 12, 14, 15, 20, 22, 26, 27, 29, 36, 40], 
"humanstandardinputflowmodul": [11, 12, 16, 19], "revers": [11, 13], "1234": [11, 13], "4321": [11, 13], "reversenumb": 11, "keyinterfac": [11, 13, 14, 16, 30, 39], "keys_to_select": [11, 13, 16, 33, 39], "connector": [11, 14], "caller": [11, 14, 37, 42], "keys_to_renam": [11, 13, 14, 16, 39], "output_numb": [11, 13], "reversed_numb": 11, "renam": [11, 13, 14, 33], "break": [11, 12, 13, 14, 15, 16], "down": [11, 12, 13, 14, 15, 16], "self": [11, 13, 14, 16, 19, 20, 22, 24, 25, 27, 29, 42], "explanatori": [11, 13, 14], "must": [11, 13, 19, 22, 25, 32], "transform": [11, 13, 33, 39], "call": [11, 12, 13, 14, 15, 16, 19, 20, 22, 24, 31, 32, 36, 37], "dictionari": [11, 12, 13, 14, 15, 16, 19, 31, 32, 33, 36, 37, 39, 40, 41, 42], "reverse_number_atom": [11, 13], "reversenumberatomicflow": [11, 13], "__init__": [11, 24, 25, 37], "kwarg": [11, 25, 31, 32, 34, 36, 40, 41, 42], "super": [11, 19, 25], "requir": [11, 12, 14, 15, 24, 25, 28, 32, 33, 36, 41], "input_data": [11, 19, 20, 22, 30, 32, 35], "dict": [11, 16, 19, 20, 22, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42], "str": [11, 16, 19, 20, 22, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42], "input_numb": 11, "int": [11, 31, 32, 33, 35, 36, 38, 42], "overrides_config": 11, "reli": [11, 24], "With": [11, 12, 13, 15, 24, 27, 29], "prepar": [11, 13, 16, 19, 20, 22], "place": [11, 12, 13, 14, 16, 17, 20], "proce": [11, 12, 13, 14, 15, 16], "invok": [11, 13, 20, 22], "list": [11, 12, 13, 14, 15, 16, 17, 19, 20, 22, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42], "sampl": [11, 13, 14, 15, 34, 36], "path_to_output_fil": [11, 12, 13, 14, 15, 16, 36, 42], "jsonl": [11, 12, 13, 14, 15, 16], "complet": [11, 12, 13, 14, 16, 19, 27, 29, 31], "expect": [11, 12, 13, 14, 16, 19, 20], "notabl": [11, 12, 26, 27, 29], "fixedreplyflowmodul": 11, "next": [11, 12, 13, 14, 15, 16, 19, 20, 22, 24, 25, 27, 29, 31], "composit": [11, 15, 16, 26, 27, 29, 30, 42, 43], "prequisit": [12, 15, 16, 22], "setting_up_aiflow": [12, 14, 15, 16], "q": [12, 13, 15, 
16, 19, 42], "reactwithhumanfeedback": [12, 15, 16, 18], "implic": [12, 18], "profici": [12, 18, 25], "autogptflow": [12, 18, 19], "enhanc": [12, 16, 18], "previou": [12, 13, 15, 16, 18, 20, 21, 23, 33, 42], "observ": [12, 15, 16, 19, 22], "challeng": [12, 13, 16], "especi": 12, "prolong": [12, 16], "princip": 12, "emerg": [12, 16], "attempt": [12, 16], "transmit": [12, 19], "histori": [12, 13, 16, 19, 30, 32, 35, 40, 43], "eventu": [12, 16, 19], "surpass": [12, 16], "maximum": [12, 15, 16, 32, 35], "token": [12, 16, 17], "temporari": 12, "opt": 12, "send": [12, 16, 20, 22], "context": [12, 16, 19], "howev": [12, 15, 16, 22, 25], "approach": [12, 16, 19, 24], "suboptim": [12, 16], "object": [12, 13, 15, 16, 25, 31, 32, 33, 34, 35, 37, 38, 40, 41, 42], "enabl": [12, 13, 19, 27, 29, 42], "memori": [12, 16, 18], "consequ": [12, 16, 18], "circular": [12, 13, 15, 16, 19, 30, 43], "solv": [12, 15, 16, 19], "four": [12, 16, 19], "distinct": [12, 14, 15, 16, 18, 19, 20], "controllerflow": [12, 15, 16], "predefin": [12, 15, 16, 19], "action": [12, 15, 16, 19, 42], "explicitli": [12, 15, 16, 19, 22, 25], "defin": [12, 14, 15, 16, 19, 20, 22, 26, 27, 29, 32, 39], "executorflow": [12, 15, 16], "closer": [12, 19, 20, 22], "accomplish": [12, 13, 16, 19], "branch": [12, 13, 14, 15, 16, 19, 25, 30, 43], "encompass": [12, 15, 16, 19], "subflow": [12, 13, 15, 16, 18, 21, 30, 32], "particular": [12, 15, 16, 19], "associ": [12, 15, 16, 19, 25], "chosen": [12, 14, 15, 16, 19], "setup": [12, 15, 16, 17, 19], "individu": [12, 15, 16, 19], "wikisearchatomicflow": [12, 15, 16], "wikipedia": [12, 14, 15, 16, 19, 20], "content": [12, 13, 15, 16, 19, 20, 22, 43], "lctoolflow": [12, 15, 16], "duckduckgosearchrun": [12, 15, 16, 19], "duckduckgo": [12, 15, 16, 19], "retriev": [12, 14, 15, 16, 18, 19, 37], "humanfeedbackflow": [12, 16, 19], "latest": [12, 14, 16, 19, 25], "collect": [12, 16, 19], "convei": [12, 16, 19], "back": [12, 13, 16, 19, 25], "subsequ": [12, 13, 14, 16, 19, 20], "capabl": 
[12, 14, 15, 16, 18, 19], "reactwithhumanfeedbackflow": [12, 16, 19], "prefer": [12, 16, 19, 24, 25], "memoryflow": [12, 19], "read": [12, 17, 19, 37, 42], "store": [12, 19], "databas": [12, 19], "vectorstoreflow": [12, 19], "broad": [12, 19], "overview": [12, 19], "v": [12, 19], "humanfeedback": [12, 16, 19], "executor": [12, 15, 16, 19], "referenc": [12, 14, 15, 16], "point": [12, 14, 15, 16, 25, 27, 29, 36], "onward": [12, 14, 15, 16], "delai": [12, 14, 15, 16], "fetch": [12, 14, 15, 16, 19, 37], "dependeci": [12, 14, 15, 16], "depend": [12, 13, 14, 15, 16, 18, 20, 23, 24, 25, 37], "fact": [12, 16], "accompani": [12, 16], "extern": [12, 15, 16, 18], "examin": [12, 16, 22, 24, 25], "necess": [12, 16], "haven": [12, 16, 17], "3": [12, 14, 15, 16, 19, 25, 28], "9": [12, 15, 16, 24], "6": [12, 15, 16], "langchain": [12, 15, 16, 19], "336": [12, 15, 16], "chromadb": [12, 19], "29": 12, "faiss": 12, "cpu": 12, "7": 12, "chunk": [12, 14, 15, 16], "default": [12, 13, 14, 15, 16, 19, 20, 22, 24, 25, 32, 36, 37, 42], "wish": [12, 14, 15, 16, 25], "found": [12, 14, 15, 16, 33, 37, 42], "unchang": [12, 16], "max_round": [12, 15, 16, 32], "30": [12, 15, 16, 42], "round": [12, 15, 16, 32], "subflows_config": [12, 13, 15, 16, 32], "controllerexecutorflowmodul": [12, 15, 16, 19], "controlleratomicflow": [12, 15, 16, 19], "wiki_search": [12, 15, 16], "perform": [12, 14, 15, 16, 19, 42], "input_arg": [12, 15, 16], "search_term": [12, 15, 16, 19], "ddg_search": [12, 15, 16, 19], "finish": [12, 15, 16, 19, 26], "signal": [12, 15, 16], "satisfi": [12, 15, 16, 25], "human_message_prompt_templ": [12, 16, 19, 20, 22], "human_feedback": [12, 16, 19], "input_vari": [12, 14, 16, 19, 20, 25, 41], "input_interface_initi": [12, 14, 16, 19, 20], "previous_messag": [12, 16], "last_k": [12, 16], "first_k": [12, 16], "ident": [12, 16, 42], "base_flow": [12, 13, 15, 16, 25, 30, 43], "branchingflow": [12, 13, 15, 16, 19, 30, 32], "model_nam": [12, 14, 15, 31], "primarili": 12, "defaut": 12, 
"flowcard": [12, 14, 19, 20, 22], "along": [12, 14, 15, 16], "o": [12, 13, 14, 15, 16, 19, 20, 25], "getenv": [12, 14, 15, 16, 19, 25], "openai_api_kei": [12, 14, 15, 16, 17, 19, 25], "azure_api_bas": [12, 14, 15, 16, 17], "azure_openai_kei": [12, 14, 15, 16, 17], "azure_api_vers": [12, 14, 15, 16, 17], "insert": [12, 14, 15, 16, 20, 25], "shown": 12, "cfg_path": [12, 13, 14, 15, 16], "hydra": [12, 14, 15, 16, 42], "_recursive_": [12, 14, 15, 16], "fals": [12, 13, 14, 15, 16, 32, 33, 36, 37, 42], "_convert_": [12, 14, 15], "partial": [12, 14, 15, 30, 41], "els": [12, 14, 15, 16, 19, 20, 22], "lastli": 12, "profess": [12, 15, 16], "date": [12, 14, 15, 16], "birth": [12, 15, 16], "michael": [12, 15, 16], "jordan": [12, 15, 16], "At": [12, 16], "basketbal": [12, 15, 16], "player": [12, 15, 16], "statistician": [12, 16], "skip": [12, 16], "correct": [12, 15, 16], "uncom": [12, 14, 15, 16], "line": [12, 14, 15, 16, 42], "save": [12, 13, 14, 15, 16, 20, 22, 33], "disk": [12, 14, 15, 16], "resembl": [12, 16], "relev": [12, 16, 19], "arg": [12, 16, 19, 42], "wiki_cont": [12, 16, 19], "jeffrei": [12, 16], "born": [12, 15, 16], "februari": [12, 15, 16], "17": [12, 15, 16, 25], "1963": [12, 15, 16], "known": [12, 16], "hi": [12, 16], "mj": [12, 16], "american": [12, 15, 16], "businessman": [12, 15, 16], "former": [12, 15, 16], "profession": [12, 15, 16], "profil": [12, 16], "offici": [12, 16], "nation": [12, 16], "nba": [12, 14, 15, 16], "websit": [12, 16], "state": [12, 13, 16, 20, 22, 32, 35, 42], "acclam": [12, 16], "greatest": [12, 16], "time": [12, 14, 16, 31, 36, 42], "he": [12, 15, 16], "plai": [12, 13, 16, 19], "fifteen": [12, 16], "season": [12, 16], "win": [12, 16], "six": [12, 16], "championship": [12, 16], "chicago": [12, 16], "bull": [12, 16], "wa": [12, 14, 15, 16, 20, 22, 25, 33, 42], "popular": [12, 16], "sport": [12, 14, 16], "1980": [12, 16], "1990": [12, 16], "global": [12, 16, 35, 42], "icon": [12, 16], "colleg": [12, 16], "three": [12, 16], "coach": [12, 
16], "dean": [12, 16], "smith": [12, 16], "north": [12, 16], "carolina": [12, 16], "tar": [12, 16], "heel": [12, 16], "freshman": [12, 16], "1982": [12, 16], "1984": [12, 16], "third": [12, 16, 26, 27, 29], "draft": [12, 16], "leagu": [12, 16], "entertain": [12, 16], "crowd": [12, 16], "prolif": [12, 16], "score": [12, 16], "reput": [12, 16], "game": [12, 16], "defens": [12, 16], "leap": [12, 16], "abil": [12, 14, 16, 19, 27, 29], "slam": [12, 16], "dunk": [12, 16], "throw": [12, 16], "contest": [12, 16], "him": [12, 16], "nicknam": [12, 16], "air": [12, 16], "won": [12, 16], "1991": [12, 16], "1992": [12, 16], "1993": [12, 16], "secur": [12, 16], "peat": [12, 16], "abruptli": [12, 16], "retir": [12, 16], "94": [12, 16], "minor": [12, 16], "basebal": [12, 16], "march": [12, 16], "1995": [12, 16], "led": [12, 16], "1996": [12, 16], "1997": [12, 16], "1998": [12, 16], "record": [12, 16], "72": [12, 16], "regular": [12, 14, 16], "96": [12, 16], "januari": [12, 16], "1999": [12, 16], "2001": [12, 16], "2003": [12, 16], "washington": [12, 16], "wizard": [12, 16], "dure": [12, 16, 19, 25, 36], "career": [12, 16], "unit": [12, 16, 27, 29, 32], "gold": [12, 16], "medal": [12, 16], "1983": [12, 16], "pan": [12, 16], "summer": [12, 16], "olymp": [12, 16], "tournament": [12, 16], "america": [12, 16], "undef": [12, 16], "accolad": [12, 16], "final": [12, 14, 15, 16, 17, 20, 22, 25], "mvp": [12, 16], "award": [12, 16], "ten": [12, 16], "five": [12, 16], "nine": [12, 16], "fourteen": [12, 16], "steal": [12, 16], "1988": [12, 16], "hold": [12, 16], "averag": [12, 16, 25], "per": [12, 16], "playoff": [12, 16], "33": [12, 16], "20th": [12, 16], "centuri": [12, 16], "athlet": [12, 16], "espn": [12, 16], "babe": [12, 16], "ruth": [12, 16], "press": [12, 16, 25], "twice": [12, 13, 16], "induct": [12, 16], "naismith": [12, 16], "hall": [12, 16], "fame": [12, 16], "2009": [12, 16], "2010": [12, 16], "men": [12, 16], "becam": [12, 16], "12": [12, 16], "06": [12, 16], "09": [12, 16, 25], 
"40": [12, 16, 42], "844": [12, 16], "humanstandardinputflow": [12, 16, 19], "126": [12, 16], "enter": [12, 16, 17, 19, 25], "singl": [12, 14, 16, 17, 32, 36], "respond": [12, 16, 19], "No": [12, 16, 19], "talk": [12, 16], "irwin": [12, 16], "think": [12, 16, 24, 27, 29], "mayb": [12, 16], "25": [12, 16], "1956": [12, 16], "scientist": [12, 16], "professor": [12, 15, 16], "univers": [12, 16], "california": [12, 16], "berkelei": [12, 15, 16], "statist": [12, 15, 16, 25], "artifici": [12, 16], "intellig": [12, 16], "elect": [12, 16], "academi": [12, 16], "nhe": [12, 16], "figur": [12, 16, 26], "2016": [12, 16], "scienc": [12, 15, 16], "influenti": [12, 16], "2022": [12, 16], "inaugur": [12, 16], "laureat": [12, 16], "prize": [12, 16], "mathemat": [12, 16], "fundament": [12, 14, 16, 18, 27, 29], "n": [12, 14, 16, 19, 20, 22, 31, 38], "educ": [12, 16], "njordan": [12, 16], "magna": [12, 16], "cum": [12, 16], "laud": [12, 16], "psychologi": [12, 16], "1978": [12, 16], "louisiana": [12, 16], "arizona": [12, 16], "phd": [12, 16], "cognit": [12, 16], "1985": [12, 16], "san": [12, 16], "diego": [12, 16], "student": [12, 16], "david": [12, 16], "rumelhart": [12, 16], "parallel": [12, 13, 16], "pdp": [12, 16], "group": [12, 16, 33], "pehong": [12, 16], "chen": [12, 16], "distinguish": [12, 14, 16], "appoint": [12, 16], "split": [12, 16], "across": [12, 16, 24, 27, 29, 42], "eec": [12, 16], "depart": [12, 16], "brain": [12, 16], "recurr": [12, 16], "neural": [12, 16], "network": [12, 16], "recent": [12, 16, 26], "less": [12, 16], "perspect": [12, 16], "background": [12, 16], "tradit": [12, 16], "popularis": [12, 16], "bayesian": [12, 16], "promin": [12, 16], "formalis": [12, 16], "variat": [12, 16], "method": [12, 14, 15, 16, 19, 32, 36, 37, 40, 41, 42], "approxim": [12, 16], "resign": [12, 16], "nin": [12, 16], "editori": [12, 16], "board": [12, 16], "journal": [12, 16], "letter": [12, 16], "argu": [12, 16], "restrict": [12, 16], "pledg": [12, 16], "lesli": [12, 16], 
"kaelbl": [12, 16], "evolut": [12, 16], "numer": [12, 16], "x": [12, 16], "nguyen": [12, 16], "wainwright": [12, 16], "confer": [12, 16], "icml": [12, 16], "2004": [12, 16], "r": [12, 16, 25], "jacob": [12, 16], "acc": [12, 16], "acm": [12, 16], "aaai": [12, 16], "allen": [12, 16], "newel": [12, 16], "ieee": [12, 16], "pioneer": [12, 16], "nsf": [12, 16], "presidenti": [12, 16], "young": [12, 16], "investig": [12, 16], "2002": [12, 16], "fellow": [12, 16], "uncertainti": [12, 16], "motor": [12, 16], "im": [12, 16], "graphic": [12, 16], "2005": [12, 16], "53": [12, 16], "52": [12, 16], "058": [12, 16], "There": [12, 13, 16, 22], "statu": [12, 15, 16], "succesfulli": 12, "prerequisit": [13, 14, 25], "atom": [13, 14, 15, 19, 26, 27, 29, 30, 43], "type": [13, 14, 16, 19, 20, 22, 27, 29, 30, 31, 32, 35, 36, 37, 38, 40, 41, 42], "sequentialflow": [13, 18, 30, 32], "toi": [13, 18], "entail": 13, "seri": 13, "compositeflow": [13, 30, 32], "higher": 13, "level": [13, 42], "readili": [13, 24, 27, 29], "arbitrari": [13, 25], "pattern": 13, "therefor": [13, 25], "form": 13, "circularflow": [13, 16, 30, 32], "excut": [13, 15], "fashion": [13, 14, 25, 32, 36], "introductori": 13, "construct": [13, 14, 19, 20, 22, 26, 27, 29, 32, 37], "unfold": 13, "reversenumbersequenti": 13, "reversenumbertwic": 13, "first_reverse_flow": 13, "reversenumberfirst": 13, "second_reverse_flow": 13, "reversenumbersecond": 13, "order": [13, 14, 16, 18, 19, 21, 23, 42], "topologi": [13, 16, 32], "fist": 13, "first_reverse_output": 13, "reset": [13, 16, 30, 32, 40, 42], "produc": [13, 14], "constitu": 13, "articul": 13, "item": [13, 14, 15, 25], "pair": [13, 14], "denot": 13, "assign": 13, "encapsul": [13, 19, 24, 32], "instanc": [13, 14, 15, 24, 25, 27, 29, 36], "match": [13, 33], "current": [13, 16, 19, 25, 27, 29, 33, 42], "determin": [13, 16, 32, 42], "delet": [13, 25, 33, 39, 42], "cach": [13, 35, 36, 37, 42], "role": [13, 14, 16, 20, 22, 40], "reverse_number_sequenti": 13, "path": [13, 14, 15, 16, 
18, 21, 22, 23, 25, 36, 37, 42], "root_dir": [13, 14, 15, 16], "sinc": [13, 15, 21, 25], "mean": 13, "minimalqa": 14, "myself": [14, 15, 18], "hub": [14, 24, 27, 29], "amaz": 14, "usual": [14, 32], "hug": [14, 18, 24, 25, 37], "face": [14, 18, 24, 25, 37], "intent": [14, 16], "reus": [14, 25, 27, 29], "assit": 14, "seamlessli": [14, 17, 19, 20, 22, 24, 27, 29], "power": [14, 18, 20, 21, 22, 23, 24, 27, 29], "litellm": [14, 17, 20, 22, 31], "full": [14, 15, 20, 22, 25, 35, 36, 40], "depth": 14, "do": [14, 15, 19, 25, 35], "run_qa_flow": 14, "297c90d08087d9ff3139521f11d1a48d7dc63ed4": 14, "indic": [14, 32, 36, 42], "flowmodul": 14, "host": 14, "liter": 14, "address": [14, 16], "huggingfac": [14, 17, 24, 25, 27, 29], "co": [14, 25], "browser": 14, "repres": [14, 15, 20, 26, 27, 29, 32, 37, 38, 40], "hash": [14, 35, 37, 42], "chatatomicflowmodul": 14, "simpleqa": 14, "additional_transform": [14, 16, 39], "data_transform": [14, 16, 30, 32, 43], "keymatchinput": [14, 16, 30, 33], "system": [14, 15, 16, 19, 20, 22, 26, 27, 29, 40], "appar": 14, "never": 14, "5": [14, 15, 25], "turbo": [14, 15], "generation_paramet": 14, "max_token": 14, "3000": 14, "temperatur": 14, "top_p": 14, "frequency_penalti": 14, "presence_penalti": 14, "later": [14, 20, 25], "privaci": 14, "attribut": [14, 36, 38], "anthrop": 14, "claud": 14, "system_message_prompt_templ": [14, 19, 25], "prompt_templ": [14, 19, 20, 22, 25, 30, 43], "jinjaprompt": [14, 19, 20, 25, 30, 41], "chatbot": [14, 25], "truthfulli": 14, "partial_vari": [14, 41], "init_human_message_prompt_templ": [14, 20, 22], "jinja": [14, 16, 41], "format": [14, 16, 19, 30, 37, 40, 41, 42], "placehold": [14, 16, 19, 20], "anthropic_api_kei": 14, "done": [14, 25], "champion": [14, 15], "qa": [14, 26, 27, 29], "sorri": 14, "real": 14, "predict": [14, 30, 36, 42], "event": [14, 42], "cannot": 14, "reliabl": 14, "conduct": 14, "internet": [14, 19, 22], "obtain": [14, 15], "finshi": 14, "sire": 14, "run_qa_flow_w_demonstr": 14, "multithread": 
[14, 36], "mulitpl": 14, "run_qa_flow_multithread": 14, "controllerexecutorflow": [15, 16, 18], "synerg": 15, "act": [15, 19], "prior": [15, 16], "decis": [15, 16, 19], "choos": [15, 16], "repeat": [15, 36], "until": 15, "anoth": [15, 36, 40, 42], "argument": [15, 16, 19, 32, 36, 40, 41, 42], "noteworthi": [15, 16], "subflow_config": 15, "popul": 15, "canada": 15, "denver": 15, "nugget": 15, "instruct": 15, "electr": [15, 16], "berklei": 15, "intend": [15, 26], "discov": 15, "grasp": [16, 18], "particularli": [16, 18], "lack": [16, 18], "deepen": [16, 18], "eventhough": 16, "fail": [16, 42], "situat": 16, "mention": 16, "misinterpret": 16, "latter": 16, "intermedi": 16, "handl": [16, 22, 23, 24, 42], "preced": 16, "reactwithhumandfeedback": 16, "output_msg_payload_processor": [16, 30, 32], "detect_finish_in_human_input": 16, "output_payload": 16, "src_flow": [16, 32, 40], "human_input": [16, 19], "strip": [16, 19, 33], "lower": 16, "early_exit": 16, "true": [16, 19, 31, 32, 33, 35, 36, 37, 42], "exit": [16, 19], "unfinish": 16, "simpli": [16, 25, 42], "procedur": [16, 25], "keep": [16, 32, 42], "remain": 16, "nearli": 16, "inclus": 16, "extra": 16, "pararamet": 16, "fed": 16, "contollerflow": 16, "mechan": 16, "chat": [16, 22, 31, 40], "exceed": 16, "overriden": 16, "earliest": 16, "request_multi_line_input_flag": 16, "query_message_prompt_templ": [16, 19], "command_arg": [16, 19], "boolean": [16, 36, 42], "multi": [16, 27, 29, 36], "detect_finish_or_continu": 16, "branch_input_data": 16, "branch_output_data": 16, "comparison": 16, "sole": 16, "alter": 16, "earlier": 16, "nevertheless": 16, "persist": 16, "reveal": 16, "inher": 16, "primari": 16, "aris": [16, 24], "workaround": 16, "autogpt": [16, 21, 26, 27, 29], "longer": 16, "straightforward": [17, 19], "walk": 17, "smooth": 17, "10": [17, 25, 28, 42], "altern": 17, "manual": [17, 24, 25], "clone": [17, 24, 28], "epfl": [17, 28], "dlab": [17, 25, 28], "conda": 17, "mkdir": [17, 25], "download": [17, 24, 27, 29], 
"arrang": 17, "yourproject": 17, "conveni": 17, "simplifi": [17, 27, 29], "verifi": 17, "email": 17, "log": [17, 19, 25, 30, 40, 43], "login": 17, "suffici": 17, "cli": 17, "effortless": 17, "varieti": 17, "doc": [17, 19], "env": [17, 42], "var": [17, 42], "reactiv": 17, "name_of_your_environ": 17, "equip": 17, "gatewai": [18, 21, 23], "master": [18, 21, 23, 24], "readi": [18, 21, 23, 25], "journei": [18, 21, 23], "practic": [18, 21, 23, 24], "deeper": [18, 21, 23], "option": [18, 31, 32, 33, 35, 36, 37, 39, 40, 42], "sequenc": [19, 42], "chromadbflow": 19, "vectorstoreflowmodul": 19, "either": [19, 22, 25], "rtype": [19, 20, 22], "get_kei": [19, 30, 31], "embed": [19, 31], "openaiembed": 19, "todo": 19, "valueerror": [19, 37, 42], "f": [19, 25, 42], "isinst": 19, "string": [19, 20, 31, 32, 33, 37, 38, 40, 42], "got": 19, "query_result": 19, "query_embed": 19, "embed_queri": 19, "n_result": 19, "flow_config": [19, 20, 22, 30, 32, 42], "elif": 19, "uuid": 19, "uuid4": 19, "rang": 19, "len": [19, 20, 22], "embed_docu": 19, "One": [19, 36], "vector": 19, "offer": 19, "utilis": 19, "vectordb": 19, "json": [19, 30, 40, 42, 43], "hard": [19, 26], "hallucin": 19, "invalid": [19, 37, 42], "critic": [19, 42], "isn": 19, "strictli": 19, "enforc": 19, "occasion": 19, "soft": [19, 25], "smart": 19, "made": [19, 25], "independ": [19, 27, 29, 36], "strength": 19, "pursu": 19, "strategi": [19, 26], "complic": 19, "exclus": 19, "doubl": [19, 42], "quot": 19, "evalu": 19, "analyz": 19, "big": 19, "pictur": 19, "behavior": [19, 26, 27, 29], "reflect": 19, "refin": 19, "cost": [19, 25], "short": [19, 26], "bullet": 19, "speak": 19, "summari": [19, 37, 42], "sai": [19, 25], "pars": 19, "human_message_prompt": 19, "init_human_message_prompt": 19, "tool_input": 19, "By": 19, "compat": 19, "wikisearch": 19, "api_wrapp": 19, "wikipediaapiwrapp": 19, "lang": 19, "top_k_result": 19, "doc_content_chars_max": 19, "page_cont": 19, "_fetch_pag": 19, "search_respons": 19, "relevant_pag": 19, 
"page_titl": 19, "search_page_titl": 19, "humanstandadinputflow": 19, "Its": 19, "conclus": 19, "iter": [19, 36], "append": [19, 37], "therebi": 19, "influenc": 19, "agent": [19, 27, 29], "query_messag": 19, "_get_messag": [19, 20, 22], "state_update_messag": 19, "updatemessage_gener": [19, 30, 40], "created_bi": [19, 25, 30, 40], "updated_flow": [19, 40], "_log_messag": 19, "_read_input": 19, "trigger": 19, "_process_input": [20, 22], "_call": [20, 22], "loop": [20, 22, 27, 29], "than": [20, 22], "1": [20, 22, 25, 32, 36, 42], "_state_update_add_chat_messag": [20, 22], "assistant_nam": [20, 22], "abov": [20, 26], "_is_conversation_initi": [20, 22], "user_message_cont": [20, 22], "_initialize_convers": [20, 22], "getattr": [20, 22], "user_nam": [20, 22], "submiss": [20, 25], "choic": [20, 27, 29, 31], "sent": 20, "deriv": 20, "differenti": 20, "signifi": 20, "input_dict": 20, "lot": 21, "visionatomicflow": [22, 27, 29], "imag": [22, 42], "video": [22, 42], "inherit": [22, 25, 32], "get_user_messag": 22, "similarli": 22, "slight": 22, "modif": 22, "staticmethod": 22, "prompttempl": 22, "text": 22, "media_data": 22, "get_video": 22, "get_imag": 22, "extend": [22, 24, 26, 27, 29], "test": 23, "compos": [24, 27, 29, 42], "novel": [24, 27, 29], "heart": 24, "uniqu": [24, 31, 40, 42], "nbaldwin": [24, 25], "chathumanflowmodul": [24, 25], "remot": [24, 25, 37], "publicli": [24, 27, 29], "directori": [24, 25, 35, 36, 37, 42], "root": [24, 25, 37, 42], "packag": [24, 43], "trivial_sync_demo": [24, 25], "chatflow": 24, "__name__": [24, 25], "__main__": [24, 25], "synchron": [24, 37], "though": 24, "seem": 24, "unconvent": 24, "advantag": 24, "elimin": 24, "web": 24, "effortlessli": 24, "manifest": 24, "valid": [24, 37, 42], "exemplifi": [24, 26, 27, 29], "importantli": 24, "retain": 24, "upheld": 24, "conflict": 24, "warn": [24, 25, 42], "visual": 24, "represent": [24, 38, 40], "dev": [24, 25], "tree": [24, 42], "__pycache__": 24, "cpython": 24, "39": 24, "pyc": 24, "16": 
24, "mirror": 24, "typic": [24, 40, 42], "path_to_local_dev_directori": 25, "dev_usefulchatbot": 25, "touch": 25, "gitignor": [25, 37], "economicexpertbot": 25, "init": 25, "e592fd1": 25, "mode": [25, 36, 37, 42], "100644": 25, "scratch": 25, "spec": 25, "economi": 25, "2": [25, 34, 42], "expertis": 25, "financ": 25, "invest": 25, "tend": 25, "english": 25, "chines": 25, "fluentli": 25, "seen": 25, "succinct": 25, "tweak": 25, "ideal": 25, "rel": 25, "insid": 25, "instantli": 25, "far": 25, "good": 25, "yeeef": 25, "usefulchatbot": 25, "flow_launch": [25, 30, 43], "bot": 25, "input_kei": [25, 33], "cpi": 25, "output_kei": [25, 33], "yet": 25, "sync": [25, 37], "instead": 25, "symbol": 25, "propag": [25, 42], "onlin": 25, "Then": 25, "ask_economic_expert_bot": 25, "07": 25, "05": 25, "35": 25, "530": 25, "raw": [25, 40], "inference_output": 25, "outputmessag": [25, 30, 40], "message_id": [25, 30, 40], "d95683d6": 25, "9507": 25, "4a90": 25, "b290": 25, "6a43e609c904": 25, "created_at": [25, 30, 40], "530972000": 25, "message_typ": [25, 30, 40], "output_data": [25, 40], "consum": 25, "price": 25, "index": [25, 34, 35], "measur": 25, "weight": 25, "basket": 25, "servic": 25, "transport": 25, "food": 25, "medic": 25, "care": 25, "calcul": 25, "predetermin": 25, "assess": 25, "live": 25, "missing_output_kei": 25, "private_kei": [25, 30, 40], "error": [25, 36, 42], "allign": 25, "botton": 25, "button": 25, "upload": 25, "webpag": 25, "upstream": 25, "script": 25, "enough": 25, "pr": 25, "cae3fdf2f0ef7f28127cf4bc35ce985c5fc4d19a": 25, "firstli": 25, "desktop": 25, "ra": 25, "d7465df": 25, "enumer": 25, "11": 25, "count": 25, "100": 25, "delta": [25, 31], "compress": 25, "thread": [25, 35, 36], "8": 25, "952": 25, "byte": 25, "00": 25, "kib": 25, "total": 25, "pack": 25, "1849a87": 25, "1818057": 25, "ref": 25, "easili": 25, "substanti": 25, "mychatinteractiveflowmodul": 25, "cp": 25, "treat": 25, "vastli": 26, "walkthrough": 26, "gradual": 26, "vanilla": 26, "pun": 26, 
"propos": 26, "essenc": 26, "column": [26, 27, 29], "depict": [26, 27, 29], "fourth": [26, 27, 29], "competit": [26, 27, 29], "fifth": [26, 27, 29], "hypothet": [26, 27, 29], "meta": [26, 27, 29], "autonom": [26, 27, 29], "embodi": [27, 29], "stack": [27, 29], "lego": [27, 29], "arbitrarili": [27, 29], "nest": [27, 29, 33, 42], "behind": [27, 29], "reusabl": [27, 29], "Being": [27, 29], "actor": [27, 29], "center": [27, 29], "entiti": [27, 29], "abl": [27, 29], "semant": [27, 29], "recipi": [27, 29], "subsum": [27, 29], "visualqa": [27, 29], "revolution": [27, 29], "advanc": [27, 29], "freeli": [27, 29], "studi": [27, 29], "deploy": [27, 29], "top": [27, 29, 42], "infrastructur": [27, 29], "cc_flow": [27, 29], "simultan": [27, 29], "own": [27, 29], "submodul": [30, 43], "make_unique_api_info_kei": [30, 31], "merge_delta_to_stream": [30, 31], "merge_stream": [30, 31], "required_keys_config": [30, 32], "supports_cach": [30, 32], "flow_stat": [30, 32], "get_config": [30, 32], "get_interface_descript": [30, 32], "instantiate_from_config": [30, 32], "instantiate_with_overrid": [30, 32], "set_up_flow_st": [30, 32], "input_msg_payload_build": [30, 32], "topologynod": [30, 32], "sequenti": [30, 43], "datatransform": [30, 32, 33], "end_of_interact": [30, 43], "endofinteract": [30, 33], "json2obj": [30, 33], "obj2json": [30, 33], "key_copi": [30, 43], "keycopi": [30, 33, 39], "key_delet": [30, 43], "keydelet": [30, 33, 39], "key_match_input": [30, 43], "key_renam": [30, 43], "keyrenam": [30, 33, 39], "key_select": [30, 43], "keyselect": [30, 33, 39], "key_set": [30, 43], "keyset": [30, 33, 39], "print_previous_messag": [30, 43], "printpreviousmessag": [30, 33], "regex_extractor_first": [30, 43], "regexfirstoccurrenceextractor": [30, 33], "unnesting_dict": [30, 43], "unnest": [30, 33], "dataset": [30, 43], "abstractdataset": [30, 34], "demonstrations_11": [30, 43], "genericdemonstrationsdataset": [30, 34], "outputsdataset": [30, 34], "get_output_data": [30, 34, 40], 
"flow_cach": [30, 43], "caching_paramet": [30, 35], "cache_dir": [30, 35, 37], "do_cach": [30, 35], "max_cached_entri": [30, 35], "cachingkei": [30, 35], "hash_str": [30, 35], "keys_to_ignore_for_hash": [30, 35], "cachingvalu": [30, 35], "full_stat": [30, 35], "history_messages_cr": [30, 35], "output_result": [30, 35], "flowcach": [30, 35], "pop": [30, 35, 42], "clear_cach": [30, 35], "get_cache_dir": [30, 35], "baselaunch": [30, 36], "predict_dataload": [30, 36], "write_batch_output": [30, 36], "multithreadedapilaunch": [30, 36], "flow_api_launch": [30, 43], "predict_batch": [30, 36], "predict_sampl": [30, 36], "flowmodulespec": [30, 37], "build_mod_id": [30, 37], "commit_hash": [30, 37], "mod_id": [30, 37], "repo_id": [30, 37], "sync_dir": [30, 37], "flowmodulespecsummari": [30, 37], "add_mod": [30, 37], "cache_root": [30, 37], "from_flow_mod_fil": [30, 37], "get_mod": [30, 37], "serial": [30, 37, 40, 42], "sync_root": [30, 37], "add_to_sys_path": [30, 37], "create_empty_flow_mod_fil": [30, 37], "create_init_pi": [30, 37], "extract_commit_hash_from_cache_mod_dir": [30, 37], "fetch_loc": [30, 37], "fetch_remot": [30, 37], "is_local_revis": [30, 37], "is_local_sync_dir_valid": [30, 37], "is_sync_dir_modifi": [30, 37], "remove_dir_or_link": [30, 37], "retrive_commit_hash_from_remot": [30, 37], "sync_local_dep": [30, 37], "sync_remote_dep": [30, 37], "validate_and_augment_depend": [30, 37], "write_flow_mod_summari": [30, 37], "write_or_append_gitignor": [30, 37], "build_hf_cache_path": [30, 37], "flow_histori": [30, 43], "flowhistori": [30, 32, 38, 40], "add_messag": [30, 38], "get_last_n_messag": [30, 38], "to_list": [30, 38], "to_str": [30, 38, 40], "key_interfac": [30, 43], "to_dict": [30, 40], "flow_messag": [30, 43], "inputmessag": [30, 40], "updatemessage_chatmessag": [30, 40], "updatemessage_fullreset": [30, 40], "updatemessage_namespacereset": [30, 40], "jinja2_prompt": [30, 43], "create_unique_id": [30, 42], "encode_from_buff": [30, 42], "encode_imag": [30, 
42], "exception_handl": [30, 42], "extract_top_level_function_nam": [30, 42], "find_replace_in_dict": [30, 42], "flatten_dict": [30, 42], "get_current_datetime_n": [30, 42], "get_function_from_nam": [30, 42], "get_predictions_dir_path": [30, 42], "log_suggest_help": [30, 42], "nested_keys_pop": [30, 42], "nested_keys_search": [30, 42], "nested_keys_upd": [30, 42], "process_config_leaf": [30, 42], "python_file_path_to_module_path": [30, 42], "python_module_path_to_file_path": [30, 42], "read_gzipped_jsonlin": [30, 42], "read_jsonlin": [30, 42], "read_output": [30, 42], "recursive_dictionary_upd": [30, 42], "try_except_decor": [30, 42], "unflatten_dict": [30, 42], "validate_flow_config": [30, 42], "write_gzipped_jsonlin": [30, 42], "write_jsonlin": [30, 42], "write_output": [30, 42], "io_util": [30, 43], "load_pickl": [30, 42], "recursive_json_seri": [30, 42], "add_handl": [30, 42], "auto_set_dir": [30, 42], "disable_default_handl": [30, 42], "disable_propag": [30, 42], "enable_default_handl": [30, 42], "enable_explicit_format": [30, 42], "enable_propag": [30, 42], "get_log_levels_dict": [30, 42], "get_logg": [30, 42], "get_logger_dir": [30, 42], "get_verbos": [30, 42], "remove_handl": [30, 42], "reset_format": [30, 42], "set_dir": [30, 42], "set_verbos": [30, 42], "set_verbosity_debug": [30, 42], "set_verbosity_error": [30, 42], "set_verbosity_info": [30, 42], "set_verbosity_warn": [30, 42], "warning_advic": [30, 42], "warning_onc": [30, 42], "rich_util": [30, 43], "print_config_tre": [30, 42], "exhaust": 31, "union": [31, 36, 42], "wait_time_per_kei": [31, 36], "minimum": 31, "wait": [31, 36], "embeddings_cal": 31, "bool": [31, 32, 33, 35, 36, 37, 42], "static": [31, 32, 34, 36, 37, 40], "merged_stream": 31, "stream": 31, "streamed_respons": 31, "n_chat_completion_choic": 31, "abc": [32, 33, 36, 39], "classmethod": [32, 36], "recurs": [32, 42], "properti": [32, 37], "full_reset": 32, "launcher": [32, 36], "remov": [32, 37, 42], "constructor": [32, 40], 
"early_exit_kei": 32, "end": [32, 33], "node": [32, 40], "programmat": 32, "builder_fn": 32, "decor": [32, 42], "regist": 32, "payload": 32, "builder": 32, "callabl": [32, 42], "wrap": [32, 42], "processor_fn": 32, "processor": [32, 42], "inputinterfac": 32, "sub": 32, "parent": [32, 42], "child": 32, "end_of_interaction_str": 33, "detect": 33, "convert": [33, 42], "old_key2new_kei": 33, "flatten_data_dict": 33, "map": [33, 39], "old": [33, 39, 42], "flatten": [33, 42], "unflatten": [33, 42], "afterward": 33, "keys_to_delet": [33, 39], "extract": [33, 37, 42], "destin": 33, "nested_kei": [33, 42], "key2valu": 33, "last_message_onli": 33, "regex": 33, "assert_uniqu": 33, "regex_fallback": 33, "match_group": 33, "occurr": 33, "assert": 33, "__len__": 34, "__getitem__": 34, "idx": 34, "sample_data": 34, "10000": 35, "entri": 35, "ignor": [35, 42], "__lock": 35, "lock": 35, "batch": 36, "dataload": 36, "path_to_cach": 36, "keys_to_writ": 36, "output_dir": [36, 42], "n_worker": 36, "worker": 36, "single_thread": 36, "flows_with_interfac": 36, "n_independent_sampl": 36, "fault_tolerant_mod": 36, "n_batch_retri": 36, "wait_time_between_retri": 36, "instati": 36, "crash": 36, "occur": 36, "retri": 36, "tupl": [36, 42], "mod": 37, "flow_mod_spec": 37, "file_path": [37, 42], "otherwis": 37, "sy": 37, "base_dir": 37, "cache_mod_dir": 37, "legal_revis": 37, "dir": 37, "all_overwrit": 37, "_sync_depend": 37, "previous_synced_flow_mod_spec": 37, "mod_nam": 37, "caller_module_nam": 37, "previous": 37, "josifosk": 37, "default_cache_path": 37, "augment": 37, "flow_mod_summary_path": 37, "flow_mod_summari": 37, "modelnam": 37, "snapshot": 37, "compris": 38, "keys_to_copi": 39, "keys_to_set": 39, "privat": 40, "consol": 40, "data_dict": [40, 42], "dst_flow": 40, "built": 40, "raw_respons": 40, "input_message_id": 40, "identif": 40, "keys_deleted_from_namespac": 40, "namespac": 40, "subset": 41, "existing_id": 42, "against": 42, "buffer": 42, "encod": 42, "base64": 42, "image_path": 
42, "python_file_path": 42, "key_to_find": 42, "new_valu": 42, "current_path": 42, "note1": 42, "whever": 42, "note2": 42, "product": 42, "d": 42, "parent_kei": 42, "sep": 42, "separ": 42, "datetim": 42, "nanosecond": 42, "function_nam": 42, "create_if_not_exist": 42, "key1": 42, "key2": 42, "key3": 42, "search_dict": 42, "leaf_processor": 42, "leaf": 42, "module_path": 42, "path_to_fil": 42, "gzip": 42, "jsonlin": 42, "outputs_dir": 42, "cl": 42, "w": 42, "pickle_path": 42, "pickl": 42, "obj": 42, "handler": 42, "logger": 42, "set_logger_dir": 42, "scriptnam": 42, "k": 42, "Will": 42, "tensorboard": 42, "resum": 42, "train": 42, "interrupt": 42, "noth": 42, "disabl": 42, "explicit": 42, "formatt": 42, "levelnam": 42, "filenam": 42, "bound": 42, "affect": 42, "suppos": 42, "unless": 42, "checkpoint": 42, "50": 42, "fatal": 42, "20": 42, "dirnam": 42, "verbos": 42, "flows_no_advisory_warn": 42, "emit": 42, "hit": 42, "assumpt": 42, "aren": 42, "frame": 42, "dictconfig": 42, "print_ord": 42, "save_to_fil": 42, "rich": 42, "compon": 42, "subpackag": 43, "modul": 43}, "objects": {"": [[30, 0, 0, "-", "aiflows"]], "aiflows": [[31, 0, 0, "-", "backends"], [32, 0, 0, "-", "base_flows"], [33, 0, 0, "-", "data_transformations"], [34, 0, 0, "-", "datasets"], [35, 0, 0, "-", "flow_cache"], [36, 0, 0, "-", "flow_launchers"], [37, 0, 0, "-", "flow_verse"], [38, 0, 0, "-", "history"], [39, 0, 0, "-", "interfaces"], [40, 0, 0, "-", "messages"], [41, 0, 0, "-", "prompt_template"], [42, 0, 0, "-", "utils"]], "aiflows.backends": [[31, 0, 0, "-", "api_info"], [31, 0, 0, "-", "llm_lite"]], "aiflows.backends.api_info": [[31, 1, 1, "", "ApiInfo"]], "aiflows.backends.api_info.ApiInfo": [[31, 2, 1, "", "api_base"], [31, 2, 1, "", "api_key"], [31, 2, 1, "", "api_version"], [31, 2, 1, "", "backend_used"]], "aiflows.backends.llm_lite": [[31, 1, 1, "", "LiteLLMBackend"], [31, 4, 1, "", "merge_delta_to_stream"], [31, 4, 1, "", "merge_streams"]], "aiflows.backends.llm_lite.LiteLLMBackend": 
[[31, 3, 1, "", "get_key"], [31, 3, 1, "", "make_unique_api_info_key"]], "aiflows.base_flows": [[32, 0, 0, "-", "abstract"], [32, 0, 0, "-", "atomic"], [32, 0, 0, "-", "branching"], [32, 0, 0, "-", "circular"], [32, 0, 0, "-", "composite"], [32, 0, 0, "-", "sequential"]], "aiflows.base_flows.abstract": [[32, 1, 1, "", "Flow"]], "aiflows.base_flows.abstract.Flow": [[32, 2, 1, "", "REQUIRED_KEYS_CONFIG"], [32, 2, 1, "", "SUPPORTS_CACHING"], [32, 2, 1, "", "flow_config"], [32, 2, 1, "", "flow_state"], [32, 3, 1, "", "get_config"], [32, 3, 1, "", "get_interface_description"], [32, 2, 1, "", "history"], [32, 3, 1, "", "instantiate_from_config"], [32, 3, 1, "", "instantiate_from_default_config"], [32, 3, 1, "", "instantiate_with_overrides"], [32, 5, 1, "", "name"], [32, 3, 1, "", "reset"], [32, 3, 1, "", "run"], [32, 3, 1, "", "set_up_flow_state"], [32, 3, 1, "", "type"]], "aiflows.base_flows.atomic": [[32, 1, 1, "", "AtomicFlow"]], "aiflows.base_flows.atomic.AtomicFlow": [[32, 2, 1, "", "flow_config"], [32, 2, 1, "", "flow_state"], [32, 2, 1, "", "history"], [32, 3, 1, "", "type"]], "aiflows.base_flows.branching": [[32, 1, 1, "", "BranchingFlow"]], "aiflows.base_flows.branching.BranchingFlow": [[32, 3, 1, "", "run"], [32, 2, 1, "", "subflows"], [32, 3, 1, "", "type"]], "aiflows.base_flows.circular": [[32, 1, 1, "", "CircularFlow"], [32, 1, 1, "", "TopologyNode"]], "aiflows.base_flows.circular.CircularFlow": [[32, 2, 1, "", "REQUIRED_KEYS_CONFIG"], [32, 3, 1, "", "input_msg_payload_builder"], [32, 3, 1, "", "output_msg_payload_processor"], [32, 3, 1, "", "run"], [32, 2, 1, "", "subflows"], [32, 3, 1, "", "type"]], "aiflows.base_flows.composite": [[32, 1, 1, "", "CompositeFlow"]], "aiflows.base_flows.composite.CompositeFlow": [[32, 2, 1, "", "REQUIRED_KEYS_CONFIG"], [32, 3, 1, "", "instantiate_from_config"], [32, 2, 1, "", "subflows"], [32, 3, 1, "", "type"]], "aiflows.base_flows.sequential": [[32, 1, 1, "", "SequentialFlow"]], 
"aiflows.base_flows.sequential.SequentialFlow": [[32, 2, 1, "", "flow_config"], [32, 2, 1, "", "flow_state"], [32, 2, 1, "", "history"], [32, 2, 1, "", "subflows"], [32, 3, 1, "", "type"]], "aiflows.data_transformations": [[33, 0, 0, "-", "abstract"], [33, 0, 0, "-", "end_of_interaction"], [33, 0, 0, "-", "json"], [33, 0, 0, "-", "key_copy"], [33, 0, 0, "-", "key_delete"], [33, 0, 0, "-", "key_match_input"], [33, 0, 0, "-", "key_rename"], [33, 0, 0, "-", "key_select"], [33, 0, 0, "-", "key_set"], [33, 0, 0, "-", "print_previous_messages"], [33, 0, 0, "-", "regex_extractor_first"], [33, 0, 0, "-", "unnesting_dict"]], "aiflows.data_transformations.abstract": [[33, 1, 1, "", "DataTransformation"]], "aiflows.data_transformations.end_of_interaction": [[33, 1, 1, "", "EndOfInteraction"]], "aiflows.data_transformations.json": [[33, 1, 1, "", "Json2Obj"], [33, 1, 1, "", "Obj2Json"]], "aiflows.data_transformations.key_copy": [[33, 1, 1, "", "KeyCopy"]], "aiflows.data_transformations.key_delete": [[33, 1, 1, "", "KeyDelete"]], "aiflows.data_transformations.key_match_input": [[33, 1, 1, "", "KeyMatchInput"]], "aiflows.data_transformations.key_rename": [[33, 1, 1, "", "KeyRename"]], "aiflows.data_transformations.key_select": [[33, 1, 1, "", "KeySelect"]], "aiflows.data_transformations.key_set": [[33, 1, 1, "", "KeySet"]], "aiflows.data_transformations.print_previous_messages": [[33, 1, 1, "", "PrintPreviousMessages"]], "aiflows.data_transformations.regex_extractor_first": [[33, 1, 1, "", "RegexFirstOccurrenceExtractor"]], "aiflows.data_transformations.unnesting_dict": [[33, 1, 1, "", "UnNesting"]], "aiflows.datasets": [[34, 0, 0, "-", "abstract"], [34, 0, 0, "-", "demonstrations_11"], [34, 0, 0, "-", "outputs"]], "aiflows.datasets.abstract": [[34, 1, 1, "", "AbstractDataset"]], "aiflows.datasets.demonstrations_11": [[34, 1, 1, "", "GenericDemonstrationsDataset"]], "aiflows.datasets.outputs": [[34, 1, 1, "", "OutputsDataset"]], "aiflows.datasets.outputs.OutputsDataset": [[34, 
3, 1, "", "get_output_data"]], "aiflows.flow_cache": [[35, 0, 0, "-", "flow_cache"]], "aiflows.flow_cache.flow_cache": [[35, 1, 1, "", "CACHING_PARAMETERS"], [35, 1, 1, "", "CachingKey"], [35, 1, 1, "", "CachingValue"], [35, 1, 1, "", "FlowCache"], [35, 4, 1, "", "clear_cache"], [35, 4, 1, "", "get_cache_dir"]], "aiflows.flow_cache.flow_cache.CACHING_PARAMETERS": [[35, 2, 1, "", "cache_dir"], [35, 2, 1, "", "do_caching"], [35, 2, 1, "", "max_cached_entries"]], "aiflows.flow_cache.flow_cache.CachingKey": [[35, 2, 1, "", "flow"], [35, 3, 1, "", "hash_string"], [35, 2, 1, "", "input_data"], [35, 2, 1, "", "keys_to_ignore_for_hash"]], "aiflows.flow_cache.flow_cache.CachingValue": [[35, 2, 1, "", "full_state"], [35, 2, 1, "", "history_messages_created"], [35, 2, 1, "", "output_results"]], "aiflows.flow_cache.flow_cache.FlowCache": [[35, 3, 1, "", "get"], [35, 3, 1, "", "pop"], [35, 3, 1, "", "set"]], "aiflows.flow_launchers": [[36, 0, 0, "-", "abstract"], [36, 0, 0, "-", "flow_API_launcher"]], "aiflows.flow_launchers.abstract": [[36, 1, 1, "", "BaseLauncher"], [36, 1, 1, "", "MultiThreadedAPILauncher"]], "aiflows.flow_launchers.abstract.BaseLauncher": [[36, 3, 1, "", "predict"], [36, 3, 1, "", "predict_dataloader"], [36, 3, 1, "", "write_batch_output"]], "aiflows.flow_launchers.abstract.MultiThreadedAPILauncher": [[36, 3, 1, "", "predict_dataloader"]], "aiflows.flow_launchers.flow_API_launcher": [[36, 1, 1, "", "FlowLauncher"]], "aiflows.flow_launchers.flow_API_launcher.FlowLauncher": [[36, 3, 1, "", "launch"], [36, 3, 1, "", "predict"], [36, 3, 1, "", "predict_batch"], [36, 3, 1, "", "predict_sample"]], "aiflows.flow_verse": [[37, 0, 0, "-", "loading"], [37, 0, 0, "-", "utils"]], "aiflows.flow_verse.loading": [[37, 1, 1, "", "FlowModuleSpec"], [37, 1, 1, "", "FlowModuleSpecSummary"], [37, 4, 1, "", "add_to_sys_path"], [37, 4, 1, "", "create_empty_flow_mod_file"], [37, 4, 1, "", "create_init_py"], [37, 4, 1, "", "extract_commit_hash_from_cache_mod_dir"], [37, 4, 1, "", 
"fetch_local"], [37, 4, 1, "", "fetch_remote"], [37, 4, 1, "", "is_local_revision"], [37, 4, 1, "", "is_local_sync_dir_valid"], [37, 4, 1, "", "is_sync_dir_modified"], [37, 4, 1, "", "remove_dir_or_link"], [37, 4, 1, "", "retrive_commit_hash_from_remote"], [37, 4, 1, "", "sync_dependencies"], [37, 4, 1, "", "sync_local_dep"], [37, 4, 1, "", "sync_remote_dep"], [37, 4, 1, "", "validate_and_augment_dependency"], [37, 4, 1, "", "write_flow_mod_summary"], [37, 4, 1, "", "write_or_append_gitignore"]], "aiflows.flow_verse.loading.FlowModuleSpec": [[37, 3, 1, "", "build_mod_id"], [37, 2, 1, "", "cache_dir"], [37, 2, 1, "", "commit_hash"], [37, 5, 1, "", "mod_id"], [37, 2, 1, "", "repo_id"], [37, 2, 1, "", "revision"], [37, 2, 1, "", "sync_dir"]], "aiflows.flow_verse.loading.FlowModuleSpecSummary": [[37, 3, 1, "", "add_mod"], [37, 5, 1, "", "cache_root"], [37, 3, 1, "", "from_flow_mod_file"], [37, 3, 1, "", "get_mod"], [37, 3, 1, "", "get_mods"], [37, 3, 1, "", "serialize"], [37, 5, 1, "", "sync_root"]], "aiflows.flow_verse.utils": [[37, 4, 1, "", "build_hf_cache_path"], [37, 4, 1, "", "is_local_revision"]], "aiflows.history": [[38, 0, 0, "-", "flow_history"]], "aiflows.history.flow_history": [[38, 1, 1, "", "FlowHistory"]], "aiflows.history.flow_history.FlowHistory": [[38, 3, 1, "", "add_message"], [38, 3, 1, "", "get_last_n_messages"], [38, 3, 1, "", "to_list"], [38, 3, 1, "", "to_string"]], "aiflows.interfaces": [[39, 0, 0, "-", "abstract"], [39, 0, 0, "-", "key_interface"]], "aiflows.interfaces.abstract": [[39, 1, 1, "", "Interface"]], "aiflows.interfaces.key_interface": [[39, 1, 1, "", "KeyInterface"]], "aiflows.messages": [[40, 0, 0, "-", "abstract"], [40, 0, 0, "-", "flow_message"]], "aiflows.messages.abstract": [[40, 1, 1, "", "Message"]], "aiflows.messages.abstract.Message": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, 
"", "to_dict"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message": [[40, 1, 1, "", "InputMessage"], [40, 1, 1, "", "OutputMessage"], [40, 1, 1, "", "UpdateMessage_ChatMessage"], [40, 1, 1, "", "UpdateMessage_FullReset"], [40, 1, 1, "", "UpdateMessage_Generic"], [40, 1, 1, "", "UpdateMessage_NamespaceReset"]], "aiflows.messages.flow_message.InputMessage": [[40, 3, 1, "", "build"], [40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message.OutputMessage": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 3, 1, "", "get_output_data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message.UpdateMessage_ChatMessage": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message.UpdateMessage_FullReset": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message.UpdateMessage_Generic": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.messages.flow_message.UpdateMessage_NamespaceReset": [[40, 2, 1, "", "created_at"], [40, 2, 1, "", "created_by"], [40, 2, 1, "", "data"], [40, 2, 1, "", "message_id"], [40, 2, 1, "", "message_type"], [40, 2, 1, "", "private_keys"], [40, 3, 1, "", "to_string"]], "aiflows.prompt_template": [[41, 0, 
0, "-", "jinja2_prompts"]], "aiflows.prompt_template.jinja2_prompts": [[41, 1, 1, "", "JinjaPrompt"]], "aiflows.prompt_template.jinja2_prompts.JinjaPrompt": [[41, 3, 1, "", "format"], [41, 3, 1, "", "partial"]], "aiflows.utils": [[42, 0, 0, "-", "general_helpers"], [42, 0, 0, "-", "io_utils"], [42, 0, 0, "-", "logging"], [42, 0, 0, "-", "rich_utils"]], "aiflows.utils.general_helpers": [[42, 4, 1, "", "create_unique_id"], [42, 4, 1, "", "encode_from_buffer"], [42, 4, 1, "", "encode_image"], [42, 4, 1, "", "exception_handler"], [42, 4, 1, "", "extract_top_level_function_names"], [42, 4, 1, "", "find_replace_in_dict"], [42, 4, 1, "", "flatten_dict"], [42, 4, 1, "", "get_current_datetime_ns"], [42, 4, 1, "", "get_function_from_name"], [42, 4, 1, "", "get_predictions_dir_path"], [42, 4, 1, "", "log_suggest_help"], [42, 4, 1, "", "nested_keys_pop"], [42, 4, 1, "", "nested_keys_search"], [42, 4, 1, "", "nested_keys_update"], [42, 4, 1, "", "process_config_leafs"], [42, 4, 1, "", "python_file_path_to_module_path"], [42, 4, 1, "", "python_module_path_to_file_path"], [42, 4, 1, "", "read_gzipped_jsonlines"], [42, 4, 1, "", "read_jsonlines"], [42, 4, 1, "", "read_outputs"], [42, 4, 1, "", "read_yaml_file"], [42, 4, 1, "", "recursive_dictionary_update"], [42, 4, 1, "", "try_except_decorator"], [42, 4, 1, "", "unflatten_dict"], [42, 4, 1, "", "validate_flow_config"], [42, 4, 1, "", "write_gzipped_jsonlines"], [42, 4, 1, "", "write_jsonlines"], [42, 4, 1, "", "write_outputs"]], "aiflows.utils.io_utils": [[42, 4, 1, "", "load_pickle"], [42, 4, 1, "", "recursive_json_serialize"]], "aiflows.utils.logging": [[42, 4, 1, "", "add_handler"], [42, 4, 1, "", "auto_set_dir"], [42, 4, 1, "", "disable_default_handler"], [42, 4, 1, "", "disable_propagation"], [42, 4, 1, "", "enable_default_handler"], [42, 4, 1, "", "enable_explicit_format"], [42, 4, 1, "", "enable_propagation"], [42, 4, 1, "", "get_log_levels_dict"], [42, 4, 1, "", "get_logger"], [42, 4, 1, "", "get_logger_dir"], [42, 4, 1, 
"", "get_verbosity"], [42, 4, 1, "", "remove_handler"], [42, 4, 1, "", "reset_format"], [42, 4, 1, "", "set_dir"], [42, 4, 1, "", "set_verbosity"], [42, 4, 1, "", "set_verbosity_debug"], [42, 4, 1, "", "set_verbosity_error"], [42, 4, 1, "", "set_verbosity_info"], [42, 4, 1, "", "set_verbosity_warning"], [42, 4, 1, "", "warning_advice"], [42, 4, 1, "", "warning_once"]], "aiflows.utils.rich_utils": [[42, 4, 1, "", "print_config_tree"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method", "4": "py:function", "5": "py:property"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"], "4": ["py", "function", "Python function"], "5": ["py", "property", "Python property"]}, "titleterms": {"citat": 0, "autom": [1, 3, 4], "document": [1, 2, 4, 19], "flow": [1, 3, 4, 10, 11, 12, 13, 14, 15, 16, 18, 19, 22, 23, 24, 25, 27, 29], "flowvers": [1, 3, 4, 10, 14, 17, 18, 27, 29], "1": [1, 2, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 23], "your": [1, 3, 4, 5, 9, 10, 11, 13, 17, 25], "sphinx": [1, 2], "format": [1, 2], "2": [1, 2, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 23], "instal": [1, 17, 28], "pydoc": 1, "markdown": 1, "3": [1, 2, 4, 5, 6, 8, 9, 10, 17, 18, 21], "navig": 1, "directori": 1, "4": [1, 2, 5, 6, 8, 18], "build": 1, "code": [2, 3, 4, 5], "standard": [2, 3], "simplic": 2, "readabl": 2, "best": 2, "practic": [2, 5], "docstr": 2, "5": [2, 4, 5, 8, 18, 26], "backward": 2, "compat": 2, "6": [2, 18], "thorough": 2, "test": [2, 25], "7": [2, 18], "coverag": 2, "8": 2, "featur": [2, 3, 5], "contribut": [3, 4, 5, 7, 8, 9, 25], "guid": [3, 21, 23, 26], "tabl": 3, "content": [3, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42], "prefac": 3, "want": 3, "To": [3, 6, 8], "aiflow": [3, 5, 6, 8, 9, 17, 18, 27, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43], "connect": 3, 
"With": [3, 4, 6, 9, 16, 18], "like": [3, 6], "mind": 3, "contributor": [3, 9], "how": [3, 6, 8, 9], "get": [3, 5, 6, 26], "help": [3, 6], "librari": [3, 5, 9, 10], "bug": [3, 5], "fix": [3, 5], "addit": [3, 19], "creat": [3, 4, 5, 25], "new": [3, 5], "exist": [3, 4, 5, 25], "gener": 3, "flowcard": 3, "readm": 3, "wall": [3, 9], "share": [3, 9], "public": [3, 9], "project": [3, 6, 8], "licenc": [3, 8], "inform": [3, 5], "spoiler": 3, "alert": 3, "It": 3, "": [3, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25, 26], "open": [3, 8], "sourc": [3, 8], "complet": 3, "free": 3, "recommend": 4, "workflow": [4, 23, 25], "check": [4, 5], "talk": [4, 5], "commun": [4, 5, 6, 9], "develop": [4, 23, 25, 26], "enhanc": 4, "an": [4, 5, 25], "organ": 4, "workspac": 4, "leverag": 4, "templat": 4, "precis": 4, "consid": 4, "option": [4, 17, 28], "engag": 4, "dialogu": 4, "discord": [4, 5], "ad": 5, "step": [5, 10, 17], "identifi": 5, "report": 5, "issu": [5, 6, 8], "discuss": [5, 6], "start": [5, 10, 26], "pull": [5, 10], "request": 5, "pr": 5, "0": 5, "fork": 5, "repositori": 5, "clone": 5, "branch": [5, 32], "make": 5, "chang": 5, "And": 5, "adher": 5, "commit": 5, "push": [5, 17], "link": [5, 17], "address": 5, "review": 5, "concern": 5, "feedback": [5, 6, 16, 18, 19], "style": 5, "look": 6, "collabor": 6, "faq": 6, "i": [6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25, 27, 29], "m": [6, 8], "encount": 6, "debug": 6, "can": [6, 8, 9], "where": 6, "my": [6, 9], "work": [6, 9], "brainstorm": 6, "idea": 6, "them": 6, "don": 6, "t": 6, "have": [6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25], "bandwidth": 6, "time": 6, "relat": 6, "would": 6, "find": 6, "somebodi": 6, "what": [6, 12, 14, 15, 16], "should": [6, 27, 29], "do": [6, 8, 9], "info": 8, "frequent": 8, "ask": 8, "question": 8, "worri": 8, "about": 8, "licens": 8, "There": 8, "ani": 8, "catch": 8, "pai": 8, "anyth": 8, "us": [8, 10, 27, 29], "commerci": 8, "ar": [8, 9], "restrict": 8, "codebas": 
9, "appear": 9, "next": 9, "releas": 9, "cite": 9, "Their": 9, "research": [9, 27, 29], "quick": [10, 26], "section": [10, 11, 12, 13, 14, 15, 16, 17], "run": [10, 12, 15, 16], "first": [10, 11, 13], "qa": 10, "from": [10, 27, 29], "By": [10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25], "tutori": [10, 11, 12, 13, 14, 15, 16, 17, 18, 23, 24, 25, 26], "end": [10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25], "Will": [10, 11, 12, 13, 14, 15, 16, 17, 18, 21, 23, 24, 25], "extern": 10, "depend": 10, "playground": 10, "notebook": 10, "atom": [11, 18, 22, 32], "defin": [11, 13], "write": [11, 13], "autogpt": [12, 18, 19], "The": [12, 15, 16, 17, 19], "composit": [13, 18, 32], "sequenti": [13, 32], "introduc": [14, 18], "simpl": [14, 18], "q": [14, 18], "A": [14, 18], "craft": 14, "chatflowmodul": 14, "react": [15, 16, 18], "human": [16, 18, 19], "set": [17, 18], "up": [17, 18], "effici": 17, "folder": 17, "structur": 17, "hug": 17, "face": 17, "api": 17, "kei": 17, "definit": [19, 20, 22], "topologi": 19, "subflow": 19, "memori": 19, "controllerflow": 19, "executorflow": 19, "lctoolflow": 19, "wikisearchatomicflow": 19, "chatatomicflow": [20, 21], "method": [20, 22], "detail": [21, 26], "exampl": [21, 26], "visionatomicflow": 21, "autogptflow": 21, "vision": 22, "modul": [23, 24, 25, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42], "manag": [23, 24], "typic": [23, 25], "introduct": [24, 27, 29], "sync": 24, "namespac": [24, 25], "publish": 25, "own": 25, "over": 25, "under": 25, "min": 26, "20": 26, "10": 26, "nutshel": [27, 29], "why": [27, 29], "As": [27, 29], "you": [27, 29], "benefit": [27, 29], "practition": [27, 29], "other": 28, "bleed": 28, "edg": 28, "version": 28, "packag": [30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42], "subpackag": 30, "backend": 31, "submodul": [31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42], "api_info": 31, "llm_lite": 31, "base_flow": 32, "abstract": [32, 33, 34, 36, 39, 40], "circular": 32, "data_transform": 33, 
"end_of_interact": 33, "json": 33, "key_copi": 33, "key_delet": 33, "key_match_input": 33, "key_renam": 33, "key_select": 33, "key_set": 33, "print_previous_messag": 33, "regex_extractor_first": 33, "unnesting_dict": 33, "dataset": 34, "demonstrations_11": 34, "output": 34, "flow_cach": 35, "flow_launch": 36, "flow_api_launch": 36, "flow_vers": 37, "load": 37, "util": [37, 42], "histori": 38, "flow_histori": 38, "interfac": 39, "key_interfac": 39, "messag": 40, "flow_messag": 40, "prompt_templ": 41, "jinja2_prompt": 41, "general_help": 42, "io_util": 42, "log": 42, "rich_util": 42}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx": 57}, "alltitles": {"Citation": [[0, "citation"]], "Automating the documentation of a Flow on the FlowVerse": [[1, "automating-the-documentation-of-a-flow-on-the-flowverse"]], "1. Document Your Flow in Sphinx Format": [[1, "document-your-flow-in-sphinx-format"]], "2. Install pydoc-markdown": [[1, "install-pydoc-markdown"]], "3. Navigate to Your Flow Directory": [[1, "navigate-to-your-flow-directory"]], "4. Build the Markdown": [[1, "build-the-markdown"]], "Coding Standards": [[2, "coding-standards"]], "1. Simplicity and Readability": [[2, "simplicity-and-readability"]], "2. Best Practices": [[2, "best-practices"]], "3. Documentation": [[2, "documentation"]], "4. Docstrings in Sphinx Format": [[2, "docstrings-in-sphinx-format"]], "5. Backward Compatibility": [[2, "backward-compatibility"]], "6. Thorough Testing": [[2, "thorough-testing"]], "7. Test Coverage": [[2, "test-coverage"]], "8. 
Feature Tests": [[2, "feature-tests"]], "Contribution Guide": [[3, "contribution-guide"]], "Table of Contents": [[3, "table-of-contents"]], "Preface": [[3, "preface"]], "Want To Contribute to aiFlows?": [[3, "want-to-contribute-to-aiflows"]], "Connecting With Like-Minded Contributors & How To Get Help ?": [[3, "connecting-with-like-minded-contributors-how-to-get-help"]], "Contributing To aiFlows Library: Bug Fixes and Feature Additions Guide": [[3, "contributing-to-aiflows-library-bug-fixes-and-feature-additions-guide"]], "Contributing To the FlowVerse: Creating New Flows and Contributing To Existing Flows Guide": [[3, "contributing-to-the-flowverse-creating-new-flows-and-contributing-to-existing-flows-guide"]], "Automating the Generation of FlowCards (README) for the FlowVerse": [[3, "automating-the-generation-of-flowcards-readme-for-the-flowverse"]], "Coding Standards for aiFlows": [[3, "coding-standards-for-aiflows"]], "Contributors Wall and Sharing/Publicizing Your aiFlows Project or Flow": [[3, "contributors-wall-and-sharing-publicizing-your-aiflows-project-or-flow"]], "Licence Information (Spoiler Alert: It\u2019s Open-Source and Completely Free!)": [[3, "licence-information-spoiler-alert-its-open-source-and-completely-free"]], "Recommended Workflow for Contributing to a Flow on the FlowVerse": [[4, "recommended-workflow-for-contributing-to-a-flow-on-the-flowverse"]], "1. Check Existing Flows & Talk to the Community": [[4, "check-existing-flows-talk-to-the-community"]], "2. Developing Your Flow - Creating or Enhancing for Contribution": [[4, "developing-your-flow-creating-or-enhancing-for-contribution"]], "2.1. Create an Organized Workspace": [[4, "create-an-organized-workspace"]], "2.2 Leverage the Flow Template": [[4, "leverage-the-flow-template"]], "2.3. Code With Precision": [[4, "code-with-precision"]], "2.5. Consider Automation for Documentation (Optional)": [[4, "consider-automation-for-documentation-optional"]], "3. 
Engage in Dialogue on Discord": [[4, "engage-in-dialogue-on-discord"]], "Contributing to aiFlows Library (for bug fixes and adding features)": [[5, "contributing-to-aiflows-library-for-bug-fixes-and-adding-features"]], "Step 1: Identifying and Reporting an Issue / Bug": [[5, "step-1-identifying-and-reporting-an-issue-bug"]], "1.1. Check Existing Issues & Talk to the Community": [[5, "check-existing-issues-talk-to-the-community"]], "Community Discussion on Discord:": [[5, "community-discussion-on-discord"]], "1.2. Creating a New Issue": [[5, "creating-a-new-issue"]], "Step 2: Getting Started with a Pull Request (PR)": [[5, "step-2-getting-started-with-a-pull-request-pr"]], "2.0. Inform the Community": [[5, "inform-the-community"]], "2.1. Fork the Repository": [[5, "fork-the-repository"]], "2.2. Clone Your Fork": [[5, "clone-your-fork"]], "2.3. Create a New Branch": [[5, "create-a-new-branch"]], "Step 3: Coding and Making a Pull Request": [[5, "step-3-coding-and-making-a-pull-request"]], "3.1 Make Changes & And adhere to aiFlow\u2019s coding practices": [[5, "make-changes-and-adhere-to-aiflow-s-coding-practices"]], "3.2. Commit Changes": [[5, "commit-changes"]], "3.3. Push Changes": [[5, "push-changes"]], "3.4. Create a Pull Request": [[5, "create-a-pull-request"]], "3.5. Link the pull request to an issue": [[5, "link-the-pull-request-to-an-issue"]], "Step 4: Addressing Reviewer Concerns": [[5, "step-4-addressing-reviewer-concerns"]], "4.1. Reviewer Feedback": [[5, "reviewer-feedback"]], "4.2. Coding Style": [[5, "coding-style"]], "Looking for Collaborators ?": [[6, "looking-for-collaborators"]], "Looking for Collaborators - FAQ": [[6, "looking-for-collaborators-faq"]], "1. I\u2019m Encountering Issues With Debugging. How Can the Community Help?": [[6, "im-encountering-issues-with-debugging-how-can-the-community-help"]], "2. Where Can I Get Feedback on My Work?": [[6, "where-can-i-get-feedback-on-my-work"]], "3. I\u2019m Looking To Brainstorm Ideas. 
Where Can I Discuss Them With the Community?": [[6, "im-looking-to-brainstorm-ideas-where-can-i-discuss-them-with-the-community"]], "4. I Don\u2019t Have the Bandwidth/Time To Work on a Project Related to aiFlows and Would Like To Find Somebody To Collaborate With. What Should I Do?": [[6, "i-dont-have-the-bandwidth-time-to-work-on-a-project-related-to-aiflows-and-would-like-to-find-somebody-to-collaborate-with-what-should-i-do"]], "Contribute": [[7, "contribute"]], "Licence Info: Frequently Asked Questions": [[8, "licence-info-frequently-asked-questions"]], "1. I\u2019m Worried About License Issues. Is aiFlows Open-Source?": [[8, "im-worried-about-license-issues-is-aiflows-open-source"]], "2. Is There Any Catch? Do I Have To Pay Anything?": [[8, "is-there-any-catch-do-i-have-to-pay-anything"]], "3. Can I Use aiFlows in Commercial Projects?": [[8, "can-i-use-aiflows-in-commercial-projects"]], "4. Are There Any Restrictions on How I Can Use aiFlows?": [[8, "are-there-any-restrictions-on-how-i-can-use-aiflows"]], "5. How Can I Contribute to aiFlows?": [[8, "how-can-i-contribute-to-aiflows"]], "Publicizing Your Work": [[9, "publicizing-your-work"]], "1. Do Contributors to aiFlows\u2019 Codebase Appear on the Contributors Wall in the Library\u2019s Next Release?": [[9, "do-contributors-to-aiflows-codebase-appear-on-the-contributors-wall-in-the-librarys-next-release"]], "2. How Can I Share My Work With the Community?": [[9, "how-can-i-share-my-work-with-the-community"]], "3. 
Are Contributors Cited for Their Contributions to Research?": [[9, "are-contributors-cited-for-their-contributions-to-research"]], "Quick Start": [[10, "quick-start"]], "Section 1: Running your First QA Flow using a Flow from the FlowVerse": [[10, "section-1-running-your-first-qa-flow-using-a-flow-from-the-flowverse"]], "By the Tutorial\u2019s End, I Will Have\u2026": [[10, "by-the-tutorial-s-end-i-will-have"], [11, "by-the-tutorial-s-end-i-will-have"], [12, "by-the-tutorial-s-end-i-will-have"], [13, "by-the-tutorial-s-end-i-will-have"], [14, "by-the-tutorial-s-end-i-will-have"], [15, "by-the-tutorial-s-end-i-will-have"], [16, "by-the-tutorial-s-end-i-will-have"], [17, "by-the-tutorial-s-end-i-will-have"], [18, "by-the-tutorial-s-end-i-will-have"], [18, "id1"], [18, "id2"], [18, "id3"], [18, "id4"], [18, "id5"], [18, "id6"], [23, "by-the-tutorial-s-end-i-will-have"], [23, "id1"], [24, "by-the-tutorial-s-end-i-will-have"], [25, "by-the-tutorial-s-end-i-will-have"]], "Step 1: Pull a Flow From the FlowVerse": [[10, "step-1-pull-a-flow-from-the-flowverse"]], "External Library Dependencies": [[10, "external-library-dependencies"]], "Step 3: Run the Flow!": [[10, "step-3-run-the-flow"]], "Section 2: FlowVerse Playground Notebook": [[10, "section-2-flowverse-playground-notebook"]], "Atomic Flow Tutorial": [[11, "atomic-flow-tutorial"]], "Section 1: Defining Atomic Flows": [[11, "section-1-defining-atomic-flows"]], "Section 2: Writing Your First Atomic Flow": [[11, "section-2-writing-your-first-atomic-flow"]], "AutoGPT Tutorial": [[12, "autogpt-tutorial"]], "Section 1: What\u2019s The AutoGPT flow ?": [[12, "section-1-what-s-the-autogpt-flow"]], "Section 2 Running the AutoGPT Flow": [[12, "section-2-running-the-autogpt-flow"]], "Composite Flow Tutorial": [[13, "composite-flow-tutorial"]], "Section 1: Defining Composite Flows and Sequential Flows": [[13, "section-1-defining-composite-flows-and-sequential-flows"]], "Section 2: Writing Your First Sequential Flow": [[13, 
"section-2-writing-your-first-sequential-flow"]], "Introducing the FlowVerse with a Simple Q&A Flow": [[14, "introducing-the-flowverse-with-a-simple-q-a-flow"]], "Section 1: What\u2019s the FlowVerse ?": [[14, "section-1-what-s-the-flowverse"]], "Section 2: Crafting a Simple Q&A Flow with the ChatFlowModule": [[14, "section-2-crafting-a-simple-q-a-flow-with-the-chatflowmodule"]], "ReAct Tutorial": [[15, "react-tutorial"]], "Section 1: What\u2019s The ReAct Flow ?": [[15, "section-1-what-s-the-react-flow"]], "Section 2: Running The ReAct Flow": [[15, "section-2-running-the-react-flow"]], "ReAct With Human Feedback Tutorial": [[16, "react-with-human-feedback-tutorial"]], "Section 1: What\u2019s The ReAct With Human Feedback Flow ?": [[16, "section-1-what-s-the-react-with-human-feedback-flow"]], "Section 2: Running the ReAct With Human Feedback Flow": [[16, "section-2-running-the-react-with-human-feedback-flow"]], "Setting up aiFlows": [[17, "setting-up-aiflows"]], "Section 1: Installing aiFlows": [[17, "section-1-installing-aiflows"]], "Section 2: Setting Up The FlowVerse": [[17, "section-2-setting-up-the-flowverse"]], "Step 1: Setting up efficient Folder Structure": [[17, "step-1-setting-up-efficient-folder-structure"]], "Step 2: Optional - Linking Hugging Face for FlowVerse Push": [[17, "step-2-optional-linking-hugging-face-for-flowverse-push"]], "Section 3: Setting Up Your API Keys": [[17, "section-3-setting-up-your-api-keys"]], "Tutorials": [[18, "tutorials"]], "1. Setting up aiFlows": [[18, "setting-up-aiflows"]], "2. Atomic Flow Tutorial": [[18, "atomic-flow-tutorial"]], "3. Composite Flow Tutorial": [[18, "composite-flow-tutorial"]], "4. Introducing the FlowVerse with a Simple Q&A Flow": [[18, "introducing-the-flowverse-with-a-simple-q-a-flow"]], "5. ReAct Tutorial": [[18, "react-tutorial"]], "6. ReAct With Human Feedback Tutorial": [[18, "react-with-human-feedback-tutorial"]], "7. 
AutoGPT Tutorial": [[18, "autogpt-tutorial"]], "AutoGPT": [[19, "autogpt"]], "Definition": [[19, "definition"], [20, "definition"], [22, "definition"]], "Topology": [[19, "topology"]], "Subflows": [[19, "subflows"]], "Memory Flow": [[19, "memory-flow"]], "Additional Documentation:": [[19, "additional-documentation"], [19, "id1"], [19, "id2"], [19, "id3"]], "ControllerFlow": [[19, "controllerflow"]], "ExecutorFlow": [[19, "executorflow"]], "1. The LCToolFlow": [[19, "the-lctoolflow"]], "2. The WikiSearchAtomicFlow": [[19, "the-wikisearchatomicflow"]], "Human Feedback Flow": [[19, "human-feedback-flow"]], "ChatAtomicFlow": [[20, "chatatomicflow"]], "Methods": [[20, "methods"], [22, "methods"]], "Detailed Examples": [[21, "detailed-examples"], [26, "detailed-examples"]], "1. ChatAtomicFlow": [[21, "chatatomicflow"]], "By the Guide\u2019s End, I Will Have\u2026": [[21, "by-the-guide-s-end-i-will-have"], [21, "id1"], [21, "id2"]], "2. VisionAtomicFlow": [[21, "visionatomicflow"]], "3. AutoGPTFlow": [[21, "autogptflow"]], "Vision Atomic Flow": [[22, "vision-atomic-flow"]], "Developer\u2019s Guide": [[23, "developer-s-guide"]], "1. Flow Module Management": [[23, "flow-module-management"]], "2. 
Typical Developer Workflows": [[23, "typical-developer-workflows"]], "Flow Module Management": [[24, "flow-module-management"]], "Introduction": [[24, "introduction"], [27, "introduction"], [29, "introduction"]], "Flow Modules": [[24, "flow-modules"]], "Syncing Flow Modules": [[24, "syncing-flow-modules"]], "Flow Module Namespace": [[24, "flow-module-namespace"]], "Typical Developer Workflows": [[25, "typical-developer-workflows"]], "Creating, Testing, and Publishing Your Own Flow Module": [[25, "creating-testing-and-publishing-your-own-flow-module"]], "Creating Your Own Flow Module": [[25, "creating-your-own-flow-module"]], "Testing Your Own Flow Module": [[25, "testing-your-own-flow-module"]], "Publishing Your Flow Module": [[25, "publishing-your-flow-module"]], "Contributing to an Existing Flow": [[25, "contributing-to-an-existing-flow"]], "Develop Over an Existing Flow and Publish it Under Your Namespace": [[25, "develop-over-an-existing-flow-and-publish-it-under-your-namespace"]], "Getting Started": [[26, "getting-started"]], "Quick start (\ud83d\udd53 5 min)": [[26, "quick-start-5-min"]], "Tutorial (\ud83d\udd53 20 min)": [[26, "tutorial-20-min"]], "Developer\u2019s Guide (\ud83d\udd53 10 min)": [[26, "developer-s-guide-10-min"]], "Flows in a Nutshell": [[27, "flows-in-a-nutshell"], [29, "flows-in-a-nutshell"]], "FlowVerse in a Nutshell": [[27, "flowverse-in-a-nutshell"], [29, "flowverse-in-a-nutshell"]], "Why should I use aiFlows?": [[27, "why-should-i-use-aiflows"], [29, "why-should-i-use-aiflows"]], "As a researcher, you will benefit from:": [[27, "as-a-researcher-you-will-benefit-from"], [29, "as-a-researcher-you-will-benefit-from"]], "As a practitioner, you will benefit from:": [[27, "as-a-practitioner-you-will-benefit-from"], [29, "as-a-practitioner-you-will-benefit-from"]], "Installation": [[28, "installation"]], "Other Installation Options": [[28, "other-installation-options"]], "Install bleeding-edge version": [[28, "install-bleeding-edge-version"]], 
"aiflows package": [[30, "aiflows-package"]], "Subpackages": [[30, "subpackages"]], "Module contents": [[30, "module-aiflows"], [31, "module-aiflows.backends"], [32, "module-aiflows.base_flows"], [33, "module-aiflows.data_transformations"], [34, "module-aiflows.datasets"], [35, "module-aiflows.flow_cache"], [36, "module-aiflows.flow_launchers"], [37, "module-aiflows.flow_verse"], [38, "module-aiflows.history"], [39, "module-aiflows.interfaces"], [40, "module-aiflows.messages"], [41, "module-aiflows.prompt_template"], [42, "module-aiflows.utils"]], "aiflows.backends package": [[31, "aiflows-backends-package"]], "Submodules": [[31, "submodules"], [32, "submodules"], [33, "submodules"], [34, "submodules"], [35, "submodules"], [36, "submodules"], [37, "submodules"], [38, "submodules"], [39, "submodules"], [40, "submodules"], [41, "submodules"], [42, "submodules"]], "aiflows.backends.api_info module": [[31, "module-aiflows.backends.api_info"]], "aiflows.backends.llm_lite module": [[31, "module-aiflows.backends.llm_lite"]], "aiflows.base_flows package": [[32, "aiflows-base-flows-package"]], "aiflows.base_flows.abstract module": [[32, "module-aiflows.base_flows.abstract"]], "aiflows.base_flows.atomic module": [[32, "module-aiflows.base_flows.atomic"]], "aiflows.base_flows.branching module": [[32, "module-aiflows.base_flows.branching"]], "aiflows.base_flows.circular module": [[32, "module-aiflows.base_flows.circular"]], "aiflows.base_flows.composite module": [[32, "module-aiflows.base_flows.composite"]], "aiflows.base_flows.sequential module": [[32, "module-aiflows.base_flows.sequential"]], "aiflows.data_transformations package": [[33, "aiflows-data-transformations-package"]], "aiflows.data_transformations.abstract module": [[33, "module-aiflows.data_transformations.abstract"]], "aiflows.data_transformations.end_of_interaction module": [[33, "module-aiflows.data_transformations.end_of_interaction"]], "aiflows.data_transformations.json module": [[33, 
"module-aiflows.data_transformations.json"]], "aiflows.data_transformations.key_copy module": [[33, "module-aiflows.data_transformations.key_copy"]], "aiflows.data_transformations.key_delete module": [[33, "module-aiflows.data_transformations.key_delete"]], "aiflows.data_transformations.key_match_input module": [[33, "module-aiflows.data_transformations.key_match_input"]], "aiflows.data_transformations.key_rename module": [[33, "module-aiflows.data_transformations.key_rename"]], "aiflows.data_transformations.key_select module": [[33, "module-aiflows.data_transformations.key_select"]], "aiflows.data_transformations.key_set module": [[33, "module-aiflows.data_transformations.key_set"]], "aiflows.data_transformations.print_previous_messages module": [[33, "module-aiflows.data_transformations.print_previous_messages"]], "aiflows.data_transformations.regex_extractor_first module": [[33, "module-aiflows.data_transformations.regex_extractor_first"]], "aiflows.data_transformations.unnesting_dict module": [[33, "module-aiflows.data_transformations.unnesting_dict"]], "aiflows.datasets package": [[34, "aiflows-datasets-package"]], "aiflows.datasets.abstract module": [[34, "module-aiflows.datasets.abstract"]], "aiflows.datasets.demonstrations_11 module": [[34, "module-aiflows.datasets.demonstrations_11"]], "aiflows.datasets.outputs module": [[34, "module-aiflows.datasets.outputs"]], "aiflows.flow_cache package": [[35, "aiflows-flow-cache-package"]], "aiflows.flow_cache.flow_cache module": [[35, "module-aiflows.flow_cache.flow_cache"]], "aiflows.flow_launchers package": [[36, "aiflows-flow-launchers-package"]], "aiflows.flow_launchers.abstract module": [[36, "module-aiflows.flow_launchers.abstract"]], "aiflows.flow_launchers.flow_API_launcher module": [[36, "module-aiflows.flow_launchers.flow_API_launcher"]], "aiflows.flow_verse package": [[37, "aiflows-flow-verse-package"]], "aiflows.flow_verse.loading module": [[37, "module-aiflows.flow_verse.loading"]], 
"aiflows.flow_verse.utils module": [[37, "module-aiflows.flow_verse.utils"]], "aiflows.history package": [[38, "aiflows-history-package"]], "aiflows.history.flow_history module": [[38, "module-aiflows.history.flow_history"]], "aiflows.interfaces package": [[39, "aiflows-interfaces-package"]], "aiflows.interfaces.abstract module": [[39, "module-aiflows.interfaces.abstract"]], "aiflows.interfaces.key_interface module": [[39, "module-aiflows.interfaces.key_interface"]], "aiflows.messages package": [[40, "aiflows-messages-package"]], "aiflows.messages.abstract module": [[40, "module-aiflows.messages.abstract"]], "aiflows.messages.flow_message module": [[40, "module-aiflows.messages.flow_message"]], "aiflows.prompt_template package": [[41, "aiflows-prompt-template-package"]], "aiflows.prompt_template.jinja2_prompts module": [[41, "module-aiflows.prompt_template.jinja2_prompts"]], "aiflows.utils package": [[42, "aiflows-utils-package"]], "aiflows.utils.general_helpers module": [[42, "module-aiflows.utils.general_helpers"]], "aiflows.utils.io_utils module": [[42, "module-aiflows.utils.io_utils"]], "aiflows.utils.logging module": [[42, "module-aiflows.utils.logging"]], "aiflows.utils.rich_utils module": [[42, "module-aiflows.utils.rich_utils"]], "aiflows": [[43, "aiflows"]]}, "indexentries": {"aiflows": [[30, "module-aiflows"]], "module": [[30, "module-aiflows"], [31, "module-aiflows.backends"], [31, "module-aiflows.backends.api_info"], [31, "module-aiflows.backends.llm_lite"], [32, "module-aiflows.base_flows"], [32, "module-aiflows.base_flows.abstract"], [32, "module-aiflows.base_flows.atomic"], [32, "module-aiflows.base_flows.branching"], [32, "module-aiflows.base_flows.circular"], [32, "module-aiflows.base_flows.composite"], [32, "module-aiflows.base_flows.sequential"], [33, "module-aiflows.data_transformations"], [33, "module-aiflows.data_transformations.abstract"], [33, "module-aiflows.data_transformations.end_of_interaction"], [33, 
"module-aiflows.data_transformations.json"], [33, "module-aiflows.data_transformations.key_copy"], [33, "module-aiflows.data_transformations.key_delete"], [33, "module-aiflows.data_transformations.key_match_input"], [33, "module-aiflows.data_transformations.key_rename"], [33, "module-aiflows.data_transformations.key_select"], [33, "module-aiflows.data_transformations.key_set"], [33, "module-aiflows.data_transformations.print_previous_messages"], [33, "module-aiflows.data_transformations.regex_extractor_first"], [33, "module-aiflows.data_transformations.unnesting_dict"], [34, "module-aiflows.datasets"], [34, "module-aiflows.datasets.abstract"], [34, "module-aiflows.datasets.demonstrations_11"], [34, "module-aiflows.datasets.outputs"], [35, "module-aiflows.flow_cache"], [35, "module-aiflows.flow_cache.flow_cache"], [36, "module-aiflows.flow_launchers"], [36, "module-aiflows.flow_launchers.abstract"], [36, "module-aiflows.flow_launchers.flow_API_launcher"], [37, "module-aiflows.flow_verse"], [37, "module-aiflows.flow_verse.loading"], [37, "module-aiflows.flow_verse.utils"], [38, "module-aiflows.history"], [38, "module-aiflows.history.flow_history"], [39, "module-aiflows.interfaces"], [39, "module-aiflows.interfaces.abstract"], [39, "module-aiflows.interfaces.key_interface"], [40, "module-aiflows.messages"], [40, "module-aiflows.messages.abstract"], [40, "module-aiflows.messages.flow_message"], [41, "module-aiflows.prompt_template"], [41, "module-aiflows.prompt_template.jinja2_prompts"], [42, "module-aiflows.utils"], [42, "module-aiflows.utils.general_helpers"], [42, "module-aiflows.utils.io_utils"], [42, "module-aiflows.utils.logging"], [42, "module-aiflows.utils.rich_utils"]], "apiinfo (class in aiflows.backends.api_info)": [[31, "aiflows.backends.api_info.ApiInfo"]], "litellmbackend (class in aiflows.backends.llm_lite)": [[31, "aiflows.backends.llm_lite.LiteLLMBackend"]], "aiflows.backends": [[31, "module-aiflows.backends"]], "aiflows.backends.api_info": [[31, 
"module-aiflows.backends.api_info"]], "aiflows.backends.llm_lite": [[31, "module-aiflows.backends.llm_lite"]], "api_base (aiflows.backends.api_info.apiinfo attribute)": [[31, "aiflows.backends.api_info.ApiInfo.api_base"]], "api_key (aiflows.backends.api_info.apiinfo attribute)": [[31, "aiflows.backends.api_info.ApiInfo.api_key"]], "api_version (aiflows.backends.api_info.apiinfo attribute)": [[31, "aiflows.backends.api_info.ApiInfo.api_version"]], "backend_used (aiflows.backends.api_info.apiinfo attribute)": [[31, "aiflows.backends.api_info.ApiInfo.backend_used"]], "get_key() (aiflows.backends.llm_lite.litellmbackend method)": [[31, "aiflows.backends.llm_lite.LiteLLMBackend.get_key"]], "make_unique_api_info_key() (aiflows.backends.llm_lite.litellmbackend static method)": [[31, "aiflows.backends.llm_lite.LiteLLMBackend.make_unique_api_info_key"]], "merge_delta_to_stream() (in module aiflows.backends.llm_lite)": [[31, "aiflows.backends.llm_lite.merge_delta_to_stream"]], "merge_streams() (in module aiflows.backends.llm_lite)": [[31, "aiflows.backends.llm_lite.merge_streams"]], "atomicflow (class in aiflows.base_flows.atomic)": [[32, "aiflows.base_flows.atomic.AtomicFlow"]], "branchingflow (class in aiflows.base_flows.branching)": [[32, "aiflows.base_flows.branching.BranchingFlow"]], "circularflow (class in aiflows.base_flows.circular)": [[32, "aiflows.base_flows.circular.CircularFlow"]], "compositeflow (class in aiflows.base_flows.composite)": [[32, "aiflows.base_flows.composite.CompositeFlow"]], "flow (class in aiflows.base_flows.abstract)": [[32, "aiflows.base_flows.abstract.Flow"]], "required_keys_config (aiflows.base_flows.abstract.flow attribute)": [[32, "aiflows.base_flows.abstract.Flow.REQUIRED_KEYS_CONFIG"]], "required_keys_config (aiflows.base_flows.circular.circularflow attribute)": [[32, "aiflows.base_flows.circular.CircularFlow.REQUIRED_KEYS_CONFIG"]], "required_keys_config (aiflows.base_flows.composite.compositeflow attribute)": [[32, 
"aiflows.base_flows.composite.CompositeFlow.REQUIRED_KEYS_CONFIG"]], "supports_caching (aiflows.base_flows.abstract.flow attribute)": [[32, "aiflows.base_flows.abstract.Flow.SUPPORTS_CACHING"]], "sequentialflow (class in aiflows.base_flows.sequential)": [[32, "aiflows.base_flows.sequential.SequentialFlow"]], "topologynode (class in aiflows.base_flows.circular)": [[32, "aiflows.base_flows.circular.TopologyNode"]], "aiflows.base_flows": [[32, "module-aiflows.base_flows"]], "aiflows.base_flows.abstract": [[32, "module-aiflows.base_flows.abstract"]], "aiflows.base_flows.atomic": [[32, "module-aiflows.base_flows.atomic"]], "aiflows.base_flows.branching": [[32, "module-aiflows.base_flows.branching"]], "aiflows.base_flows.circular": [[32, "module-aiflows.base_flows.circular"]], "aiflows.base_flows.composite": [[32, "module-aiflows.base_flows.composite"]], "aiflows.base_flows.sequential": [[32, "module-aiflows.base_flows.sequential"]], "flow_config (aiflows.base_flows.abstract.flow attribute)": [[32, "aiflows.base_flows.abstract.Flow.flow_config"]], "flow_config (aiflows.base_flows.atomic.atomicflow attribute)": [[32, "aiflows.base_flows.atomic.AtomicFlow.flow_config"]], "flow_config (aiflows.base_flows.sequential.sequentialflow attribute)": [[32, "aiflows.base_flows.sequential.SequentialFlow.flow_config"]], "flow_state (aiflows.base_flows.abstract.flow attribute)": [[32, "aiflows.base_flows.abstract.Flow.flow_state"]], "flow_state (aiflows.base_flows.atomic.atomicflow attribute)": [[32, "aiflows.base_flows.atomic.AtomicFlow.flow_state"]], "flow_state (aiflows.base_flows.sequential.sequentialflow attribute)": [[32, "aiflows.base_flows.sequential.SequentialFlow.flow_state"]], "get_config() (aiflows.base_flows.abstract.flow class method)": [[32, "aiflows.base_flows.abstract.Flow.get_config"]], "get_interface_description() (aiflows.base_flows.abstract.flow method)": [[32, "aiflows.base_flows.abstract.Flow.get_interface_description"]], "history 
(aiflows.base_flows.abstract.flow attribute)": [[32, "aiflows.base_flows.abstract.Flow.history"]], "history (aiflows.base_flows.atomic.atomicflow attribute)": [[32, "aiflows.base_flows.atomic.AtomicFlow.history"]], "history (aiflows.base_flows.sequential.sequentialflow attribute)": [[32, "aiflows.base_flows.sequential.SequentialFlow.history"]], "input_msg_payload_builder() (aiflows.base_flows.circular.circularflow static method)": [[32, "aiflows.base_flows.circular.CircularFlow.input_msg_payload_builder"]], "instantiate_from_config() (aiflows.base_flows.abstract.flow class method)": [[32, "aiflows.base_flows.abstract.Flow.instantiate_from_config"]], "instantiate_from_config() (aiflows.base_flows.composite.compositeflow class method)": [[32, "aiflows.base_flows.composite.CompositeFlow.instantiate_from_config"]], "instantiate_from_default_config() (aiflows.base_flows.abstract.flow class method)": [[32, "aiflows.base_flows.abstract.Flow.instantiate_from_default_config"]], "instantiate_with_overrides() (aiflows.base_flows.abstract.flow class method)": [[32, "aiflows.base_flows.abstract.Flow.instantiate_with_overrides"]], "name (aiflows.base_flows.abstract.flow property)": [[32, "aiflows.base_flows.abstract.Flow.name"]], "output_msg_payload_processor() (aiflows.base_flows.circular.circularflow static method)": [[32, "aiflows.base_flows.circular.CircularFlow.output_msg_payload_processor"]], "reset() (aiflows.base_flows.abstract.flow method)": [[32, "aiflows.base_flows.abstract.Flow.reset"]], "run() (aiflows.base_flows.abstract.flow method)": [[32, "aiflows.base_flows.abstract.Flow.run"]], "run() (aiflows.base_flows.branching.branchingflow method)": [[32, "aiflows.base_flows.branching.BranchingFlow.run"]], "run() (aiflows.base_flows.circular.circularflow method)": [[32, "aiflows.base_flows.circular.CircularFlow.run"]], "set_up_flow_state() (aiflows.base_flows.abstract.flow method)": [[32, "aiflows.base_flows.abstract.Flow.set_up_flow_state"]], "subflows 
(aiflows.base_flows.branching.branchingflow attribute)": [[32, "aiflows.base_flows.branching.BranchingFlow.subflows"]], "subflows (aiflows.base_flows.circular.circularflow attribute)": [[32, "aiflows.base_flows.circular.CircularFlow.subflows"]], "subflows (aiflows.base_flows.composite.compositeflow attribute)": [[32, "aiflows.base_flows.composite.CompositeFlow.subflows"]], "subflows (aiflows.base_flows.sequential.sequentialflow attribute)": [[32, "aiflows.base_flows.sequential.SequentialFlow.subflows"]], "type() (aiflows.base_flows.abstract.flow class method)": [[32, "aiflows.base_flows.abstract.Flow.type"]], "type() (aiflows.base_flows.atomic.atomicflow class method)": [[32, "aiflows.base_flows.atomic.AtomicFlow.type"]], "type() (aiflows.base_flows.branching.branchingflow class method)": [[32, "aiflows.base_flows.branching.BranchingFlow.type"]], "type() (aiflows.base_flows.circular.circularflow class method)": [[32, "aiflows.base_flows.circular.CircularFlow.type"]], "type() (aiflows.base_flows.composite.compositeflow class method)": [[32, "aiflows.base_flows.composite.CompositeFlow.type"]], "type() (aiflows.base_flows.sequential.sequentialflow class method)": [[32, "aiflows.base_flows.sequential.SequentialFlow.type"]], "datatransformation (class in aiflows.data_transformations.abstract)": [[33, "aiflows.data_transformations.abstract.DataTransformation"]], "endofinteraction (class in aiflows.data_transformations.end_of_interaction)": [[33, "aiflows.data_transformations.end_of_interaction.EndOfInteraction"]], "json2obj (class in aiflows.data_transformations.json)": [[33, "aiflows.data_transformations.json.Json2Obj"]], "keycopy (class in aiflows.data_transformations.key_copy)": [[33, "aiflows.data_transformations.key_copy.KeyCopy"]], "keydelete (class in aiflows.data_transformations.key_delete)": [[33, "aiflows.data_transformations.key_delete.KeyDelete"]], "keymatchinput (class in aiflows.data_transformations.key_match_input)": [[33, 
"aiflows.data_transformations.key_match_input.KeyMatchInput"]], "keyrename (class in aiflows.data_transformations.key_rename)": [[33, "aiflows.data_transformations.key_rename.KeyRename"]], "keyselect (class in aiflows.data_transformations.key_select)": [[33, "aiflows.data_transformations.key_select.KeySelect"]], "keyset (class in aiflows.data_transformations.key_set)": [[33, "aiflows.data_transformations.key_set.KeySet"]], "obj2json (class in aiflows.data_transformations.json)": [[33, "aiflows.data_transformations.json.Obj2Json"]], "printpreviousmessages (class in aiflows.data_transformations.print_previous_messages)": [[33, "aiflows.data_transformations.print_previous_messages.PrintPreviousMessages"]], "regexfirstoccurrenceextractor (class in aiflows.data_transformations.regex_extractor_first)": [[33, "aiflows.data_transformations.regex_extractor_first.RegexFirstOccurrenceExtractor"]], "unnesting (class in aiflows.data_transformations.unnesting_dict)": [[33, "aiflows.data_transformations.unnesting_dict.UnNesting"]], "aiflows.data_transformations": [[33, "module-aiflows.data_transformations"]], "aiflows.data_transformations.abstract": [[33, "module-aiflows.data_transformations.abstract"]], "aiflows.data_transformations.end_of_interaction": [[33, "module-aiflows.data_transformations.end_of_interaction"]], "aiflows.data_transformations.json": [[33, "module-aiflows.data_transformations.json"]], "aiflows.data_transformations.key_copy": [[33, "module-aiflows.data_transformations.key_copy"]], "aiflows.data_transformations.key_delete": [[33, "module-aiflows.data_transformations.key_delete"]], "aiflows.data_transformations.key_match_input": [[33, "module-aiflows.data_transformations.key_match_input"]], "aiflows.data_transformations.key_rename": [[33, "module-aiflows.data_transformations.key_rename"]], "aiflows.data_transformations.key_select": [[33, "module-aiflows.data_transformations.key_select"]], "aiflows.data_transformations.key_set": [[33, 
"module-aiflows.data_transformations.key_set"]], "aiflows.data_transformations.print_previous_messages": [[33, "module-aiflows.data_transformations.print_previous_messages"]], "aiflows.data_transformations.regex_extractor_first": [[33, "module-aiflows.data_transformations.regex_extractor_first"]], "aiflows.data_transformations.unnesting_dict": [[33, "module-aiflows.data_transformations.unnesting_dict"]], "abstractdataset (class in aiflows.datasets.abstract)": [[34, "aiflows.datasets.abstract.AbstractDataset"]], "genericdemonstrationsdataset (class in aiflows.datasets.demonstrations_11)": [[34, "aiflows.datasets.demonstrations_11.GenericDemonstrationsDataset"]], "outputsdataset (class in aiflows.datasets.outputs)": [[34, "aiflows.datasets.outputs.OutputsDataset"]], "aiflows.datasets": [[34, "module-aiflows.datasets"]], "aiflows.datasets.abstract": [[34, "module-aiflows.datasets.abstract"]], "aiflows.datasets.demonstrations_11": [[34, "module-aiflows.datasets.demonstrations_11"]], "aiflows.datasets.outputs": [[34, "module-aiflows.datasets.outputs"]], "get_output_data() (aiflows.datasets.outputs.outputsdataset static method)": [[34, "aiflows.datasets.outputs.OutputsDataset.get_output_data"]], "caching_parameters (class in aiflows.flow_cache.flow_cache)": [[35, "aiflows.flow_cache.flow_cache.CACHING_PARAMETERS"]], "cachingkey (class in aiflows.flow_cache.flow_cache)": [[35, "aiflows.flow_cache.flow_cache.CachingKey"]], "cachingvalue (class in aiflows.flow_cache.flow_cache)": [[35, "aiflows.flow_cache.flow_cache.CachingValue"]], "flowcache (class in aiflows.flow_cache.flow_cache)": [[35, "aiflows.flow_cache.flow_cache.FlowCache"]], "aiflows.flow_cache": [[35, "module-aiflows.flow_cache"]], "aiflows.flow_cache.flow_cache": [[35, "module-aiflows.flow_cache.flow_cache"]], "cache_dir (aiflows.flow_cache.flow_cache.caching_parameters attribute)": [[35, "aiflows.flow_cache.flow_cache.CACHING_PARAMETERS.cache_dir"]], "clear_cache() (in module aiflows.flow_cache.flow_cache)": 
[[35, "aiflows.flow_cache.flow_cache.clear_cache"]], "do_caching (aiflows.flow_cache.flow_cache.caching_parameters attribute)": [[35, "aiflows.flow_cache.flow_cache.CACHING_PARAMETERS.do_caching"]], "flow (aiflows.flow_cache.flow_cache.cachingkey attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingKey.flow"]], "full_state (aiflows.flow_cache.flow_cache.cachingvalue attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingValue.full_state"]], "get() (aiflows.flow_cache.flow_cache.flowcache method)": [[35, "aiflows.flow_cache.flow_cache.FlowCache.get"]], "get_cache_dir() (in module aiflows.flow_cache.flow_cache)": [[35, "aiflows.flow_cache.flow_cache.get_cache_dir"]], "hash_string() (aiflows.flow_cache.flow_cache.cachingkey method)": [[35, "aiflows.flow_cache.flow_cache.CachingKey.hash_string"]], "history_messages_created (aiflows.flow_cache.flow_cache.cachingvalue attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingValue.history_messages_created"]], "input_data (aiflows.flow_cache.flow_cache.cachingkey attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingKey.input_data"]], "keys_to_ignore_for_hash (aiflows.flow_cache.flow_cache.cachingkey attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingKey.keys_to_ignore_for_hash"]], "max_cached_entries (aiflows.flow_cache.flow_cache.caching_parameters attribute)": [[35, "aiflows.flow_cache.flow_cache.CACHING_PARAMETERS.max_cached_entries"]], "output_results (aiflows.flow_cache.flow_cache.cachingvalue attribute)": [[35, "aiflows.flow_cache.flow_cache.CachingValue.output_results"]], "pop() (aiflows.flow_cache.flow_cache.flowcache method)": [[35, "aiflows.flow_cache.flow_cache.FlowCache.pop"]], "set() (aiflows.flow_cache.flow_cache.flowcache method)": [[35, "aiflows.flow_cache.flow_cache.FlowCache.set"]], "baselauncher (class in aiflows.flow_launchers.abstract)": [[36, "aiflows.flow_launchers.abstract.BaseLauncher"]], "flowlauncher (class in aiflows.flow_launchers.flow_api_launcher)": [[36, 
"aiflows.flow_launchers.flow_API_launcher.FlowLauncher"]], "multithreadedapilauncher (class in aiflows.flow_launchers.abstract)": [[36, "aiflows.flow_launchers.abstract.MultiThreadedAPILauncher"]], "aiflows.flow_launchers": [[36, "module-aiflows.flow_launchers"]], "aiflows.flow_launchers.abstract": [[36, "module-aiflows.flow_launchers.abstract"]], "aiflows.flow_launchers.flow_api_launcher": [[36, "module-aiflows.flow_launchers.flow_API_launcher"]], "launch() (aiflows.flow_launchers.flow_api_launcher.flowlauncher class method)": [[36, "aiflows.flow_launchers.flow_API_launcher.FlowLauncher.launch"]], "predict() (aiflows.flow_launchers.abstract.baselauncher method)": [[36, "aiflows.flow_launchers.abstract.BaseLauncher.predict"]], "predict() (aiflows.flow_launchers.flow_api_launcher.flowlauncher method)": [[36, "aiflows.flow_launchers.flow_API_launcher.FlowLauncher.predict"]], "predict_batch() (aiflows.flow_launchers.flow_api_launcher.flowlauncher class method)": [[36, "aiflows.flow_launchers.flow_API_launcher.FlowLauncher.predict_batch"]], "predict_dataloader() (aiflows.flow_launchers.abstract.baselauncher method)": [[36, "aiflows.flow_launchers.abstract.BaseLauncher.predict_dataloader"]], "predict_dataloader() (aiflows.flow_launchers.abstract.multithreadedapilauncher method)": [[36, "aiflows.flow_launchers.abstract.MultiThreadedAPILauncher.predict_dataloader"]], "predict_sample() (aiflows.flow_launchers.flow_api_launcher.flowlauncher static method)": [[36, "aiflows.flow_launchers.flow_API_launcher.FlowLauncher.predict_sample"]], "write_batch_output() (aiflows.flow_launchers.abstract.baselauncher class method)": [[36, "aiflows.flow_launchers.abstract.BaseLauncher.write_batch_output"]], "flowmodulespec (class in aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec"]], "flowmodulespecsummary (class in aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary"]], "add_mod() 
(aiflows.flow_verse.loading.flowmodulespecsummary method)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.add_mod"]], "add_to_sys_path() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.add_to_sys_path"]], "aiflows.flow_verse": [[37, "module-aiflows.flow_verse"]], "aiflows.flow_verse.loading": [[37, "module-aiflows.flow_verse.loading"]], "aiflows.flow_verse.utils": [[37, "module-aiflows.flow_verse.utils"]], "build_hf_cache_path() (in module aiflows.flow_verse.utils)": [[37, "aiflows.flow_verse.utils.build_hf_cache_path"]], "build_mod_id() (aiflows.flow_verse.loading.flowmodulespec static method)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.build_mod_id"]], "cache_dir (aiflows.flow_verse.loading.flowmodulespec attribute)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.cache_dir"]], "cache_root (aiflows.flow_verse.loading.flowmodulespecsummary property)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.cache_root"]], "commit_hash (aiflows.flow_verse.loading.flowmodulespec attribute)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.commit_hash"]], "create_empty_flow_mod_file() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.create_empty_flow_mod_file"]], "create_init_py() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.create_init_py"]], "extract_commit_hash_from_cache_mod_dir() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.extract_commit_hash_from_cache_mod_dir"]], "fetch_local() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.fetch_local"]], "fetch_remote() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.fetch_remote"]], "from_flow_mod_file() (aiflows.flow_verse.loading.flowmodulespecsummary static method)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.from_flow_mod_file"]], "get_mod() (aiflows.flow_verse.loading.flowmodulespecsummary method)": [[37, 
"aiflows.flow_verse.loading.FlowModuleSpecSummary.get_mod"]], "get_mods() (aiflows.flow_verse.loading.flowmodulespecsummary method)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.get_mods"]], "is_local_revision() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.is_local_revision"]], "is_local_revision() (in module aiflows.flow_verse.utils)": [[37, "aiflows.flow_verse.utils.is_local_revision"]], "is_local_sync_dir_valid() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.is_local_sync_dir_valid"]], "is_sync_dir_modified() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.is_sync_dir_modified"]], "mod_id (aiflows.flow_verse.loading.flowmodulespec property)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.mod_id"]], "remove_dir_or_link() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.remove_dir_or_link"]], "repo_id (aiflows.flow_verse.loading.flowmodulespec attribute)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.repo_id"]], "retrive_commit_hash_from_remote() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.retrive_commit_hash_from_remote"]], "revision (aiflows.flow_verse.loading.flowmodulespec attribute)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.revision"]], "serialize() (aiflows.flow_verse.loading.flowmodulespecsummary method)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.serialize"]], "sync_dependencies() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.sync_dependencies"]], "sync_dir (aiflows.flow_verse.loading.flowmodulespec attribute)": [[37, "aiflows.flow_verse.loading.FlowModuleSpec.sync_dir"]], "sync_local_dep() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.sync_local_dep"]], "sync_remote_dep() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.sync_remote_dep"]], "sync_root 
(aiflows.flow_verse.loading.flowmodulespecsummary property)": [[37, "aiflows.flow_verse.loading.FlowModuleSpecSummary.sync_root"]], "validate_and_augment_dependency() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.validate_and_augment_dependency"]], "write_flow_mod_summary() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.write_flow_mod_summary"]], "write_or_append_gitignore() (in module aiflows.flow_verse.loading)": [[37, "aiflows.flow_verse.loading.write_or_append_gitignore"]], "flowhistory (class in aiflows.history.flow_history)": [[38, "aiflows.history.flow_history.FlowHistory"]], "add_message() (aiflows.history.flow_history.flowhistory method)": [[38, "aiflows.history.flow_history.FlowHistory.add_message"]], "aiflows.history": [[38, "module-aiflows.history"]], "aiflows.history.flow_history": [[38, "module-aiflows.history.flow_history"]], "get_last_n_messages() (aiflows.history.flow_history.flowhistory method)": [[38, "aiflows.history.flow_history.FlowHistory.get_last_n_messages"]], "to_list() (aiflows.history.flow_history.flowhistory method)": [[38, "aiflows.history.flow_history.FlowHistory.to_list"]], "to_string() (aiflows.history.flow_history.flowhistory method)": [[38, "aiflows.history.flow_history.FlowHistory.to_string"]], "interface (class in aiflows.interfaces.abstract)": [[39, "aiflows.interfaces.abstract.Interface"]], "keyinterface (class in aiflows.interfaces.key_interface)": [[39, "aiflows.interfaces.key_interface.KeyInterface"]], "aiflows.interfaces": [[39, "module-aiflows.interfaces"]], "aiflows.interfaces.abstract": [[39, "module-aiflows.interfaces.abstract"]], "aiflows.interfaces.key_interface": [[39, "module-aiflows.interfaces.key_interface"]], "inputmessage (class in aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.InputMessage"]], "message (class in aiflows.messages.abstract)": [[40, "aiflows.messages.abstract.Message"]], "outputmessage (class in 
aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.OutputMessage"]], "updatemessage_chatmessage (class in aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage"]], "updatemessage_fullreset (class in aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset"]], "updatemessage_generic (class in aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic"]], "updatemessage_namespacereset (class in aiflows.messages.flow_message)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset"]], "aiflows.messages": [[40, "module-aiflows.messages"]], "aiflows.messages.abstract": [[40, "module-aiflows.messages.abstract"]], "aiflows.messages.flow_message": [[40, "module-aiflows.messages.flow_message"]], "build() (aiflows.messages.flow_message.inputmessage static method)": [[40, "aiflows.messages.flow_message.InputMessage.build"]], "created_at (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.created_at"]], "created_at (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.created_at"]], "created_at (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.created_at"]], "created_at (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.created_at"]], "created_at (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.created_at"]], "created_at (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.created_at"]], "created_at (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.created_at"]], 
"created_by (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.created_by"]], "created_by (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.created_by"]], "created_by (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.created_by"]], "created_by (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.created_by"]], "created_by (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.created_by"]], "created_by (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.created_by"]], "created_by (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.created_by"]], "data (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.data"]], "data (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.data"]], "data (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.data"]], "data (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.data"]], "data (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.data"]], "data (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.data"]], "data (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.data"]], "get_output_data() 
(aiflows.messages.flow_message.outputmessage method)": [[40, "aiflows.messages.flow_message.OutputMessage.get_output_data"]], "message_id (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.message_id"]], "message_id (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.message_id"]], "message_id (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.message_id"]], "message_id (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.message_id"]], "message_id (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.message_id"]], "message_id (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.message_id"]], "message_id (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.message_id"]], "message_type (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.message_type"]], "message_type (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.message_type"]], "message_type (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.message_type"]], "message_type (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.message_type"]], "message_type (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.message_type"]], "message_type (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, 
"aiflows.messages.flow_message.UpdateMessage_Generic.message_type"]], "message_type (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.message_type"]], "private_keys (aiflows.messages.abstract.message attribute)": [[40, "aiflows.messages.abstract.Message.private_keys"]], "private_keys (aiflows.messages.flow_message.inputmessage attribute)": [[40, "aiflows.messages.flow_message.InputMessage.private_keys"]], "private_keys (aiflows.messages.flow_message.outputmessage attribute)": [[40, "aiflows.messages.flow_message.OutputMessage.private_keys"]], "private_keys (aiflows.messages.flow_message.updatemessage_chatmessage attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.private_keys"]], "private_keys (aiflows.messages.flow_message.updatemessage_fullreset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.private_keys"]], "private_keys (aiflows.messages.flow_message.updatemessage_generic attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.private_keys"]], "private_keys (aiflows.messages.flow_message.updatemessage_namespacereset attribute)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.private_keys"]], "to_dict() (aiflows.messages.abstract.message method)": [[40, "aiflows.messages.abstract.Message.to_dict"]], "to_string() (aiflows.messages.abstract.message method)": [[40, "aiflows.messages.abstract.Message.to_string"]], "to_string() (aiflows.messages.flow_message.inputmessage method)": [[40, "aiflows.messages.flow_message.InputMessage.to_string"]], "to_string() (aiflows.messages.flow_message.outputmessage method)": [[40, "aiflows.messages.flow_message.OutputMessage.to_string"]], "to_string() (aiflows.messages.flow_message.updatemessage_chatmessage method)": [[40, "aiflows.messages.flow_message.UpdateMessage_ChatMessage.to_string"]], "to_string() (aiflows.messages.flow_message.updatemessage_fullreset 
method)": [[40, "aiflows.messages.flow_message.UpdateMessage_FullReset.to_string"]], "to_string() (aiflows.messages.flow_message.updatemessage_generic method)": [[40, "aiflows.messages.flow_message.UpdateMessage_Generic.to_string"]], "to_string() (aiflows.messages.flow_message.updatemessage_namespacereset method)": [[40, "aiflows.messages.flow_message.UpdateMessage_NamespaceReset.to_string"]], "jinjaprompt (class in aiflows.prompt_template.jinja2_prompts)": [[41, "aiflows.prompt_template.jinja2_prompts.JinjaPrompt"]], "aiflows.prompt_template": [[41, "module-aiflows.prompt_template"]], "aiflows.prompt_template.jinja2_prompts": [[41, "module-aiflows.prompt_template.jinja2_prompts"]], "format() (aiflows.prompt_template.jinja2_prompts.jinjaprompt method)": [[41, "aiflows.prompt_template.jinja2_prompts.JinjaPrompt.format"]], "partial() (aiflows.prompt_template.jinja2_prompts.jinjaprompt method)": [[41, "aiflows.prompt_template.jinja2_prompts.JinjaPrompt.partial"]], "add_handler() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.add_handler"]], "aiflows.utils": [[42, "module-aiflows.utils"]], "aiflows.utils.general_helpers": [[42, "module-aiflows.utils.general_helpers"]], "aiflows.utils.io_utils": [[42, "module-aiflows.utils.io_utils"]], "aiflows.utils.logging": [[42, "module-aiflows.utils.logging"]], "aiflows.utils.rich_utils": [[42, "module-aiflows.utils.rich_utils"]], "auto_set_dir() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.auto_set_dir"]], "create_unique_id() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.create_unique_id"]], "disable_default_handler() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.disable_default_handler"]], "disable_propagation() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.disable_propagation"]], "enable_default_handler() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.enable_default_handler"]], 
"enable_explicit_format() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.enable_explicit_format"]], "enable_propagation() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.enable_propagation"]], "encode_from_buffer() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.encode_from_buffer"]], "encode_image() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.encode_image"]], "exception_handler() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.exception_handler"]], "extract_top_level_function_names() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.extract_top_level_function_names"]], "find_replace_in_dict() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.find_replace_in_dict"]], "flatten_dict() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.flatten_dict"]], "get_current_datetime_ns() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.get_current_datetime_ns"]], "get_function_from_name() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.get_function_from_name"]], "get_log_levels_dict() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.get_log_levels_dict"]], "get_logger() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.get_logger"]], "get_logger_dir() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.get_logger_dir"]], "get_predictions_dir_path() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.get_predictions_dir_path"]], "get_verbosity() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.get_verbosity"]], "load_pickle() (in module aiflows.utils.io_utils)": [[42, "aiflows.utils.io_utils.load_pickle"]], "log_suggest_help() (in module aiflows.utils.general_helpers)": [[42, 
"aiflows.utils.general_helpers.log_suggest_help"]], "nested_keys_pop() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.nested_keys_pop"]], "nested_keys_search() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.nested_keys_search"]], "nested_keys_update() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.nested_keys_update"]], "print_config_tree() (in module aiflows.utils.rich_utils)": [[42, "aiflows.utils.rich_utils.print_config_tree"]], "process_config_leafs() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.process_config_leafs"]], "python_file_path_to_module_path() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.python_file_path_to_module_path"]], "python_module_path_to_file_path() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.python_module_path_to_file_path"]], "read_gzipped_jsonlines() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.read_gzipped_jsonlines"]], "read_jsonlines() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.read_jsonlines"]], "read_outputs() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.read_outputs"]], "read_yaml_file() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.read_yaml_file"]], "recursive_dictionary_update() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.recursive_dictionary_update"]], "recursive_json_serialize() (in module aiflows.utils.io_utils)": [[42, "aiflows.utils.io_utils.recursive_json_serialize"]], "remove_handler() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.remove_handler"]], "reset_format() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.reset_format"]], "set_dir() (in module aiflows.utils.logging)": [[42, 
"aiflows.utils.logging.set_dir"]], "set_verbosity() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.set_verbosity"]], "set_verbosity_debug() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.set_verbosity_debug"]], "set_verbosity_error() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.set_verbosity_error"]], "set_verbosity_info() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.set_verbosity_info"]], "set_verbosity_warning() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.set_verbosity_warning"]], "try_except_decorator() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.try_except_decorator"]], "unflatten_dict() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.unflatten_dict"]], "validate_flow_config() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.validate_flow_config"]], "warning_advice() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.warning_advice"]], "warning_once() (in module aiflows.utils.logging)": [[42, "aiflows.utils.logging.warning_once"]], "write_gzipped_jsonlines() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.write_gzipped_jsonlines"]], "write_jsonlines() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.write_jsonlines"]], "write_outputs() (in module aiflows.utils.general_helpers)": [[42, "aiflows.utils.general_helpers.write_outputs"]]}}) \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.backends.html b/docs/built_with_sphinx/html/source/aiflows.backends.html new file mode 100644 index 0000000..b5d138f --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.backends.html @@ -0,0 +1,309 @@ + + + + + + + + + +aiflows.backends package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.backends package

+
+

Submodules

+
+
+

aiflows.backends.api_info module

+
+
+class aiflows.backends.api_info.ApiInfo(backend_used: str | None = None, api_version: str | None = None, api_key: str | None = None, api_base: str | None = None)
+

Bases: object

+

This class contains the information about an API key.

+
+
Parameters:
+
    +
  • backend_used (str, optional) – The backend used

  • +
  • api_version (str, optional) – The version of the API

  • +
  • api_key (str, optional) – The API key

  • +
  • api_base (str, optional) – The base URL of the API

  • +
+
+
+
+
+api_base: str | None = None
+
+
+
+api_key: str = None
+
+
+
+api_version: str | None = None
+
+
+
+backend_used: str = None
+
+
+
+
+

aiflows.backends.llm_lite module

+
+
+class aiflows.backends.llm_lite.LiteLLMBackend(api_infos, model_name, **kwargs)
+

Bases: object

+

This class is a wrapper around the litellm library. It allows to use multiple API keys and to switch between them +automatically when one is exhausted.

+
+
Parameters:
+
    +
  • api_infos (List[ApiInfo]) – A list of ApiInfo objects, each containing the information about one API key

  • +
  • model_name (Union[str, Dict[str, str]]) – The name of the model to use. Can be a string or a dictionary from API to model name

  • +
  • wait_time_per_key (int) – The minimum time to wait between two calls on the same API key

  • +
  • embeddings_call (bool) – Whether to use the embedding API or the completion API

  • +
  • kwargs (Any) – Additional parameters to pass to the litellm library

  • +
+
+
+
+
+get_key()
+

Gets the next API key to use

+
+
Returns:
+

The next API key to use

+
+
Return type:
+

ApiInfo

+
+
+
+
+
+static make_unique_api_info_key(api_info: ApiInfo)
+

Makes a unique key for the api_info object

+
+
Parameters:
+

api_info (ApiInfo) – The api_info object

+
+
Returns:
+

The unique key for the api_info object

+
+
Return type:
+

str

+
+
+
+
+
+
+aiflows.backends.llm_lite.merge_delta_to_stream(merged_stream, delta)
+

Merges a delta to a stream. It is used to merge the deltas from the streamed response of the litellm library.

+
+
Parameters:
+
    +
  • merged_stream (Dict[str, Any]) – The already merged stream

  • +
  • delta (Dict[str, Any]) – The delta to merge with the merge_stream

  • +
+
+
Returns:
+

The merged stream

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+aiflows.backends.llm_lite.merge_streams(streamed_response, n_chat_completion_choices)
+

Merges the streamed response returned from the litellm library. It is used when the stream parameter is set to True.

+
+
Parameters:
+
    +
  • streamed_response (List[Dict[str, Any]]) – The streamed response returned from the litellm library

  • +
  • n_chat_completion_choices (int) – The number of chat completion choices (n parameter in the completion function)

  • +
+
+
Returns:
+

The merged streams

+
+
Return type:
+

List[Dict[str, Any]]

+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.base_flows.html b/docs/built_with_sphinx/html/source/aiflows.base_flows.html new file mode 100644 index 0000000..6dc0067 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.base_flows.html @@ -0,0 +1,662 @@ + + + + + + + + + +aiflows.base_flows package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.base_flows package

+
+

Submodules

+
+
+

aiflows.base_flows.abstract module

+
+
+class aiflows.base_flows.abstract.Flow(flow_config: Dict[str, Any])
+

Bases: ABC

+

Abstract class inherited by all Flows.

+
+
Parameters:
+

flow_config (Dict[str, Any]) – The configuration of the flow

+
+
+
+
+REQUIRED_KEYS_CONFIG = ['name', 'description']
+
+
+
+SUPPORTS_CACHING = False
+
+
+
+flow_config: Dict[str, Any]
+
+
+
+flow_state: Dict[str, Any]
+
+
+
+classmethod get_config(**overrides)
+

Returns the default config for the flow, with the overrides applied. +The default implementation construct the default config by recursively merging the configs of the base classes.

+
+
Parameters:
+

overrides (Dict[str, Any], optional) – The parameters to override in the default config

+
+
Returns:
+

The default config with the overrides applied

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+get_interface_description()
+

Returns the input and output interface description of the flow.

+
+
+
+history: FlowHistory
+
+
+
+classmethod instantiate_from_config(config)
+

Instantiates the flow from the given config.

+
+
Parameters:
+

config (Dict[str, Any]) – The config to instantiate the flow from

+
+
Returns:
+

The instantiated flow

+
+
Return type:
+

aiflows.flow.Flow

+
+
+
+
+
+classmethod instantiate_from_default_config(**overrides: Dict[str, Any] | None)
+

This method is called by the FlowLauncher to build the flow.

+
+
Parameters:
+

overrides (Dict[str, Any], optional) – The parameters to override in the default config

+
+
Returns:
+

The instantiated flow

+
+
Return type:
+

aiflows.flow.Flow

+
+
+
+
+
+classmethod instantiate_with_overrides(overrides)
+

Instantiates the flow with the given overrides.

+
+
Parameters:
+

overrides (Dict[str, Any], optional) – The parameters to override in the default config

+
+
Returns:
+

The instantiated flow

+
+
+
+
+
+property name
+

Returns the name of the flow

+
+
Returns:
+

The name of the flow

+
+
Return type:
+

str

+
+
+
+
+
+reset(full_reset: bool, recursive: bool, src_flow: Flow | str | None = 'Launcher')
+

Reset the flow state and history. If recursive is True, reset all subflows as well.

+
+
Parameters:
+
    +
  • full_reset – If True, remove all data in flow_state. If False, keep the data in flow_state.

  • +
  • recursive

  • +
  • src_flow

  • +
+
+
Returns:
+

+
+
+
+
+
+run(input_data: Dict[str, Any]) Dict[str, Any]
+

Runs the flow on the given input data. (Not implemented in the base class)

+
+
Parameters:
+

input_data (Dict[str, Any]) – The input data to run the flow on

+
+
Returns:
+

The response of the flow

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+set_up_flow_state()
+

Sets up the flow state. This method is called when the flow is instantiated, and when the flow is reset.

+
+
+
+classmethod type()
+
+
+
+
+

aiflows.base_flows.atomic module

+
+
+class aiflows.base_flows.atomic.AtomicFlow(**kwargs)
+

Bases: Flow, ABC

+

AtomicFlow is the minimal execution unit in the Flow framework. +It is an encapsulation of a single functionality that takes an input message and returns an output message.

+
+
Parameters:
+

**kwargs – Arguments to be passed to the Flow constructor

+
+
+
+
+flow_config: Dict[str, Any]
+
+
+
+flow_state: Dict[str, Any]
+
+
+
+history: FlowHistory
+
+
+
+classmethod type()
+

Returns the type of the flow.

+
+
+
+
+

aiflows.base_flows.branching module

+
+
+class aiflows.base_flows.branching.BranchingFlow(**kwargs)
+

Bases: CompositeFlow

+

This class implements a branching flow. A branching flow is a composite flow that has multiple subflows. The subflow to be executed is determined by the value of the “branch” key in the input data dictionary passed to the flow.

+
+
Parameters:
+

**kwargs – The keyword arguments passed to the CompositeFlow constructor

+
+
+
+
+run(input_data: Dict[str, Any]) Dict[str, Any]
+

Runs the branching flow. The subflow to be executed is determined by the value of the “branch” key in the input data dictionary passed to the flow.

+
+
Parameters:
+

input_data (Dict[str, Any]) – The input data dictionary

+
+
Returns:
+

The output data dictionary

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+subflows: Dict[str, Flow]
+
+
+
+classmethod type()
+

Returns the type of the flow as a string.

+
+
+
+
+

aiflows.base_flows.circular module

+
+
+class aiflows.base_flows.circular.CircularFlow(flow_config: Dict[str, Any], subflows: List[Flow])
+

Bases: CompositeFlow

+

This class represents a circular flow. It is a composite flow that runs its subflows in a circular fashion.

+
+
Parameters:
+
    +
  • flow_config (Dict[str, Any]) – The flow configuration dictionary. It must usually contain the following keys: +- ‘max_rounds’ (int): The maximum number of rounds to run the circular flow +- ‘early_exit_key’ (str): The key in the flow state that indicates the end of the interaction +- ‘topology’ (list[Dict[str, Any]]): The topology of the circular flow (the dictionary describes the topology of one node, see TopologyNode for details) +- The keys required by CompositeFlow (subflows_config)

  • +
  • subflows (List[aiflows.base_flows.Flow]) – A list of subflows. This is necessary when instantiating the flow programmatically.

  • +
  • max_rounds (int) – The maximum number of rounds to run the circular flow

  • +
+
+
Topology:
+

The topology of the circular flow

+
+
+
+
+REQUIRED_KEYS_CONFIG = ['max_rounds', 'early_exit_key', 'topology']
+
+
+
+static input_msg_payload_builder(builder_fn)
+

This decorator registers a function as an input message payload builder.

+
+
Parameters:
+

builder_fn (Callable) – The function to register

+
+
Returns:
+

The wrapped function

+
+
Return type:
+

Callable

+
+
+
+
+
+static output_msg_payload_processor(processor_fn)
+

This decorator registers a function as an output message payload processor.

+
+
Parameters:
+

processor_fn (Callable) – The function to register

+
+
Returns:
+

The wrapped function

+
+
Return type:
+

Callable

+
+
+
+
+
+run(input_data: Dict[str, Any]) Dict[str, Any]
+

Runs the circular flow. It runs its subflows in a circular fashion (following the topology).

+
+
Parameters:
+

input_data (Dict[str, Any]) – The input data dictionary

+
+
Returns:
+

The output data dictionary

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+subflows: Dict[str, Flow]
+
+
+
+classmethod type()
+

Returns the type of the flow as a string.

+
+
+
+
+class aiflows.base_flows.circular.TopologyNode(goal, input_interface, flow: Flow, output_interface: List[DataTransformation], reset: bool)
+

Bases: object

+

This class represents a node in the topology of a flow.

+
+
Parameters:
+
    +
  • goal (str) – The goal of the node

  • +
  • input_interface (aiflows.interfaces.InputInterface) – The input interface of the node’s flow

  • +
  • flow (aiflows.base_flows.Flow) – The flow of the node

  • +
  • output_interface (List[aiflows.data_transformations.DataTransformation]) – The output interface of the node’s flow

  • +
  • reset (bool) – Whether to reset the node’s flow

  • +
+
+
+
+
+
+

aiflows.base_flows.composite module

+
+
+class aiflows.base_flows.composite.CompositeFlow(flow_config: Dict[str, Any], subflows: List[Flow])
+

Bases: Flow, ABC

+

This class implements a composite flow. It is a flow that consists of multiple sub-flows. +It is the parent class for BranchingFlow, SequentialFlow and CircularFlow. Note that the run method of a CompositeFlow is not implemented.

+
+
Parameters:
+
    +
  • flow_config (Dict[str, Any]) – The configuration of the flow. It must usually contain the following keys: +- “subflows_config” (Dict[str,Any]): A dictionary of subflows configurations. The keys are the names of the subflows and the values are the configurations of the subflows. +This is necessary when instantiating the flow from a config file. +- The parameters required by the constructor of the parent class Flow

  • +
  • subflows (List[Flow]) – A list of subflows. This is necessary when instantiating the flow programmatically.

  • +
+
+
+
+
+REQUIRED_KEYS_CONFIG = ['subflows_config']
+
+
+
+classmethod instantiate_from_config(config)
+

Instantiates the flow from a config file.

+
+
Parameters:
+

config – The configuration of the flow. It must usually contain the following keys: +- “subflows_config” (Dict[str,Any]): A dictionary of subflows configurations. The keys are the names of the subflows and the values are the configurations of the subflows. +This is necessary when instantiating the flow from a config file. +- The parameters required by the constructor of the parent class Flow

+
+
+
+
+
+subflows: Dict[str, Flow]
+
+
+
+classmethod type()
+

Returns the type of the flow as a string.

+
+
+
+
+

aiflows.base_flows.sequential module

+
+
+class aiflows.base_flows.sequential.SequentialFlow(flow_config: Dict[str, Any], subflows: List[Flow])
+

Bases: CircularFlow

+

This class implements a sequential flow. It is a flow that consists of multiple sub-flows that are executed sequentially. +It is a child class of CircularFlow. The only difference between a SequentialFlow and a CircularFlow is that the SequentialFlow has a max_rounds of 1.

+
+
Parameters:
+
    +
  • flow_config (Dict[str, Any]) – The configuration of the flow. It must usually contain the following keys: +- “subflows_config” (Dict[str,Any]): A dictionary of subflows configurations. The keys are the names of the subflows and the values are the configurations of the subflows. +This is necessary when instantiating the flow from a config file. +- The parameters required by the constructor of the parent class Flow

  • +
  • subflows (List[Flow]) – A list of subflows. This is necessary when instantiating the flow programmatically.

  • +
+
+
+
+
+flow_config: Dict[str, Any]
+
+
+
+flow_state: Dict[str, Any]
+
+
+
+history: FlowHistory
+
+
+
+subflows: Dict[str, Flow]
+
+
+
+classmethod type()
+

Returns the type of the flow.

+
+
+
+
+

Module contents

+

Contains basic flow classes that can be used to build more complex flows.

+

AtomicFlow is the minimal execution unit in the Flow framework. +CompositeFlow is a flow that contains subflows and defines how they are executed.

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.data_transformations.html b/docs/built_with_sphinx/html/source/aiflows.data_transformations.html new file mode 100644 index 0000000..102e43e --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.data_transformations.html @@ -0,0 +1,425 @@ + + + + + + + + + +aiflows.data_transformations package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.data_transformations package

+
+

Submodules

+
+
+

aiflows.data_transformations.abstract module

+
+
+class aiflows.data_transformations.abstract.DataTransformation(output_key=None)
+

Bases: ABC

+

This class is the base class for all data transformations.

+
+
Parameters:
+

output_key (str, optional) – The output key to apply the transformation to

+
+
+
+
+
+

aiflows.data_transformations.end_of_interaction module

+
+
+class aiflows.data_transformations.end_of_interaction.EndOfInteraction(output_key: str, end_of_interaction_string: str, input_key: str)
+

Bases: DataTransformation

+

This class detects if the end of interaction string is in the input string.

+
+
Parameters:
+
    +
  • output_key (str, optional) – The output key to apply the transformation to

  • +
  • end_of_interaction_string (str) – The end of interaction string to detect

  • +
  • input_key (str) – The input key to apply the transformation to

  • +
+
+
+
+
+
+

aiflows.data_transformations.json module

+
+
+class aiflows.data_transformations.json.Json2Obj(input_key: str, output_key: str | None = None)
+

Bases: DataTransformation

+

This class converts a JSON string to a Python object.

+
+
Parameters:
+
    +
  • input_key (str) – The input key to apply the transformation to

  • +
  • output_key (str, optional) – The output key to save the transformed data to

  • +
+
+
+
+
+
+class aiflows.data_transformations.json.Obj2Json(input_key: str, output_key: str | None = None)
+

Bases: DataTransformation

+

This class converts a Python object to a JSON string.

+
+
Parameters:
+
    +
  • input_key (str) – The input key to apply the transformation to

  • +
  • output_key (str, optional) – The output key to save the transformed data to

  • +
+
+
+
+
+
+

aiflows.data_transformations.key_copy module

+
+
+class aiflows.data_transformations.key_copy.KeyCopy(old_key2new_key: Dict[str, str], flatten_data_dict: bool = True)
+

Bases: DataTransformation

+

This class copies the value of a key to a new key. It can be used to rename a key.

+
+
Parameters:
+
    +
  • old_key2new_key (Dict[str, str]) – A dictionary mapping old keys to new keys

  • +
  • flatten_data_dict (bool, optional) – Whether to flatten the data dictionary before applying the transformation and unflatten it afterwards

  • +
+
+
+
+
+
+

aiflows.data_transformations.key_delete module

+
+
+class aiflows.data_transformations.key_delete.KeyDelete(keys_to_delete: List[str], flatten_data_dict: bool = True)
+

Bases: DataTransformation

+

This class deletes a list of keys from the data dictionary.

+
+
Parameters:
+
    +
  • keys_to_delete (List[str]) – A list of keys to delete

  • +
  • flatten_data_dict (bool, optional) – Whether to flatten the data dictionary before applying the transformation and unflatten it afterwards

  • +
+
+
+
+
+
+

aiflows.data_transformations.key_match_input module

+
+
+class aiflows.data_transformations.key_match_input.KeyMatchInput
+

Bases: DataTransformation

+

This class extracts all keys from the data dictionary that are required by the destination flow.

+
+
+
+

aiflows.data_transformations.key_rename module

+
+
+class aiflows.data_transformations.key_rename.KeyRename(old_key2new_key: Dict[str, str], nested_keys: bool = True)
+

Bases: DataTransformation

+

This class renames a list of keys from the data dictionary.

+
+
Parameters:
+
    +
  • old_key2new_key (Dict[str, str]) – A dictionary mapping old keys to new keys

  • +
  • nested_keys (bool, optional) – Whether to use nested keys

  • +
+
+
+
+
+
+

aiflows.data_transformations.key_select module

+
+
+class aiflows.data_transformations.key_select.KeySelect(keys_to_select: List[str], nested_keys: bool = True)
+

Bases: DataTransformation

+

This class selects a list of keys from the data dictionary.

+
+
Parameters:
+
    +
  • keys_to_select (List[str]) – A list of keys to select

  • +
  • nested_keys (bool, optional) – Whether to use nested keys

  • +
+
+
+
+
+
+

aiflows.data_transformations.key_set module

+
+
+class aiflows.data_transformations.key_set.KeySet(key2value: Dict[str, str], flatten_data_dict: bool = True)
+

Bases: DataTransformation

+

This class sets a list of keys to a given value in the data dictionary.

+
+
Parameters:
+
    +
  • key2value (Dict[str, str]) – A dictionary mapping keys to values

  • +
  • flatten_data_dict (bool, optional) – Whether to flatten the data dictionary before applying the transformation and unflatten it afterwards

  • +
+
+
+
+
+
+

aiflows.data_transformations.print_previous_messages module

+
+
+class aiflows.data_transformations.print_previous_messages.PrintPreviousMessages(last_message_only=False)
+

Bases: DataTransformation

+

This class prints the previous messages of the current flow.

+
+
Parameters:
+

last_message_only (bool, optional) – Whether to print only the last message or all previous messages

+
+
+
+
+
+

aiflows.data_transformations.regex_extractor_first module

+
+
+class aiflows.data_transformations.regex_extractor_first.RegexFirstOccurrenceExtractor(regex: str, output_key: str, assert_unique: bool, strip: bool, input_key: str, regex_fallback: str | None = None, match_group: int = 0)
+

Bases: DataTransformation

+

This class extracts the first occurrence of a regex from a given input key and saves it to the output key.

+
+
Parameters:
+
    +
  • regex (str) – The regex to search for

  • +
  • output_key (str, optional) – The output key to save the transformed data to

  • +
  • assert_unique (bool, optional) – Whether to assert that the regex has only one match

  • +
  • strip (bool, optional) – Whether to strip the result

  • +
  • input_key (str) – The input key to apply the transformation to

  • +
  • regex_fallback (str, optional) – A regex to use if the first regex was not found

  • +
  • match_group (int, optional) – The match group to return

  • +
+
+
+
+
+
+

aiflows.data_transformations.unnesting_dict module

+
+
+class aiflows.data_transformations.unnesting_dict.UnNesting(input_key: str, output_key: str | None = None)
+

Bases: DataTransformation

+

This class unnests a dictionary from the data dictionary.

+
+
Parameters:
+
    +
  • input_key (str) – The key of the dictionary to unnest

  • +
  • output_key (str, optional) – The output key to save the transformed data to

  • +
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.datasets.html b/docs/built_with_sphinx/html/source/aiflows.datasets.html new file mode 100644 index 0000000..85edf9a --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.datasets.html @@ -0,0 +1,216 @@ + + + + + + + + + +aiflows.datasets package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.datasets package

+
+

Submodules

+
+
+

aiflows.datasets.abstract module

+
+
+class aiflows.datasets.abstract.AbstractDataset(params)
+

Bases: object

+
+
A dataset implements 2 functions
    +
  • __len__ (returns the number of samples in our dataset)

  • +
  • __getitem__ (returns a sample from the dataset at the given index idx)

  • +
+
+
+
+
+
+

aiflows.datasets.demonstrations_11 module

+
+
+class aiflows.datasets.demonstrations_11.GenericDemonstrationsDataset(data=None, **kwargs)
+

Bases: AbstractDataset

+
+
+
+

aiflows.datasets.outputs module

+
+
+class aiflows.datasets.outputs.OutputsDataset(data=None, **kwargs)
+

Bases: AbstractDataset

+
+
+static get_output_data(sample_data, idx=None)
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.flow_cache.html b/docs/built_with_sphinx/html/source/aiflows.flow_cache.html new file mode 100644 index 0000000..a608776 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.flow_cache.html @@ -0,0 +1,355 @@ + + + + + + + + + +aiflows.flow_cache package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.flow_cache package

+
+

Submodules

+
+
+

aiflows.flow_cache.flow_cache module

+
+
+class aiflows.flow_cache.flow_cache.CACHING_PARAMETERS(max_cached_entries: int = 10000, do_caching: bool = True, cache_dir: str | None = None)
+

Bases: object

+

This class contains the global caching parameters.

+
+
Parameters:
+
    +
  • max_cached_entries (int, optional) – The maximum number of cached entries

  • +
  • do_caching (bool, optional) – Whether to do caching

  • +
  • cache_dir (str, optional) – The cache directory

  • +
+
+
+
+
+cache_dir: str = None
+
+
+
+do_caching: bool = True
+
+
+
+max_cached_entries: int = 10000
+
+
+
+
+class aiflows.flow_cache.flow_cache.CachingKey(flow: Flow, input_data: Dict[str, Any], keys_to_ignore_for_hash: List[str])
+

Bases: object

+

This class contains the caching key.

+
+
Parameters:
+
    +
  • flow (Flow) – The flow

  • +
  • input_data (Dict) – The input data

  • +
  • keys_to_ignore_for_hash (List) – The keys to ignore for the hash

  • +
+
+
+
+
+flow: Flow
+
+
+
+hash_string() str
+
+
+
+input_data: Dict[str, Any]
+
+
+
+keys_to_ignore_for_hash: List[str]
+
+
+
+
+class aiflows.flow_cache.flow_cache.CachingValue(output_results: Dict, full_state: Dict, history_messages_created: List)
+

Bases: object

+

This class contains the cached value.

+
+
Parameters:
+
    +
  • output_results (Dict) – The output results

  • +
  • full_state (Dict) – The full state

  • +
  • history_messages_created (List) – The history messages created

  • +
+
+
+
+
+full_state: Dict
+
+
+
+history_messages_created: List
+
+
+
+output_results: Dict
+
+
+
+
+class aiflows.flow_cache.flow_cache.FlowCache
+

Bases: object

+

This class is the flow cache.

+
+
Parameters:
+
    +
  • index (Index) – The index

  • +
  • __lock (threading.Lock) – The lock

  • +
+
+
+
+
+get(key: str) CachingValue | None
+

Returns the cached value for the given key.

+
+
Parameters:
+

key (str) – The key

+
+
Returns:
+

The cached value

+
+
Return type:
+

Optional[CachingValue]

+
+
+
+
+
+pop(key: str)
+

Pops the cached value for the given key.

+
+
Parameters:
+

key (str) – The key

+
+
+
+
+
+set(key: str, value: CachingValue)
+

Sets the cached value for the given key.

+
+
Parameters:
+
    +
  • key (str) – The key

  • +
  • value (CachingValue) – The cached value

  • +
+
+
+
+
+
+
+aiflows.flow_cache.flow_cache.clear_cache()
+

Clears the cache.

+
+
+
+aiflows.flow_cache.flow_cache.get_cache_dir() str
+

Returns the cache directory.

+
+
Returns:
+

The cache directory

+
+
Return type:
+

str

+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.flow_launchers.html b/docs/built_with_sphinx/html/source/aiflows.flow_launchers.html new file mode 100644 index 0000000..4b742d0 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.flow_launchers.html @@ -0,0 +1,390 @@ + + + + + + + + + +aiflows.flow_launchers package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.flow_launchers package

+
+

Submodules

+
+
+

aiflows.flow_launchers.abstract module

+
+
+class aiflows.flow_launchers.abstract.BaseLauncher
+

Bases: ABC

+

A base class for creating a model launcher.

+
+
+predict(batch: Iterable[Dict]) List[Dict]
+

Runs inference for the data provided in the batch. It returns a list of dictionaries containing the predictions. (Not Implemented for BaseLauncher)

+
+
Parameters:
+

batch (Iterable[Dict]) – An iterable of dictionaries containing the data for each sample to run inference on.

+
+
Returns:
+

A list of dictionaries containing the predictions.

+
+
Return type:
+

List[Dict]

+
+
+
+
+
+predict_dataloader(dataloader: Iterable, path_to_cache: str | None = None)
+

Runs inference for the data provided in the dataloader. (Not Implemented for BaseLauncher)

+
+
Parameters:
+
    +
  • dataloader (Iterable) – An iterable of dictionaries containing the data for each sample to run inference on.

  • +
  • path_to_cache (Optional[str], optional) – A path to a cache file containing existing predictions to use as a starting point.

  • +
+
+
+
+
+
+classmethod write_batch_output(batch: List[Dict], path_to_output_file: str, keys_to_write: List[str])
+

Class method that writes the output of a batch to a file.

+
+
Parameters:
+
    +
  • batch (List[Dict]) – A list of dictionaries containing the predictions.

  • +
  • path_to_output_file (str) – The path to the output file.

  • +
  • keys_to_write (List[str]) – A list of keys to write to file.

  • +
+
+
+
+
+
+
+class aiflows.flow_launchers.abstract.MultiThreadedAPILauncher(**kwargs)
+

Bases: BaseLauncher, ABC

+

A class for creating a multi-threaded model to query an API that can make requests using multiple API keys.

+
+
Parameters:
+
    +
  • debug (bool, optional) – A boolean indicating whether to print debug information (if true, it will not run the multithreading).

  • +
  • output_dir (str, optional) – The directory to write the output files to.

  • +
  • n_workers (int, optional) – The number of workers to use in the multithreading.

  • +
  • wait_time_per_key (int, optional) – The number of seconds to wait before making another request with the same API key.

  • +
  • single_threaded (bool, optional) – A boolean indicating whether to run the multithreading or not.

  • +
+
+
+
+
+predict_dataloader(dataloader: Iterable[dict], flows_with_interfaces: List[Dict[str, Any]]) None
+

Runs inference for the data provided in the dataloader. +It writes the results to output files selected from the output_dir attribute.

+
+
Parameters:
+
    +
  • dataloader – An iterable of dictionaries containing the data for each sample to run inference on.

  • +
  • flows_with_interfaces(List[Dict]) – A list of dictionaries containing a flow instance, and an input and output interface.

  • +
+
+
+
+
+
+
+

aiflows.flow_launchers.flow_API_launcher module

+
+
+class aiflows.flow_launchers.flow_API_launcher.FlowLauncher(n_independent_samples: int, fault_tolerant_mode: bool, n_batch_retries: int, wait_time_between_retries: int, **kwargs)
+

Bases: MultiThreadedAPILauncher

+

Flow Launcher class for running inference on a flow. One can run the inference with the flow launcher in multiple ways: +- Using the launch class method: This method takes a flow and runs inference on the given data (no multithreading) and no need to instantiate the class. +- Using the predict_dataloader method: This method runs inference on the given dataloader (Requires instantiating the class). +The predict_dataloader method can run inference in both single-threaded and multi-threaded modes (see the MultiThreadedAPILauncher class for more details).

+
+
Parameters:
+
    +
  • n_independent_samples (int) – the number of times to independently repeat the same inference for a given sample

  • +
  • fault_tolerant_mode (bool) – whether to crash if an error occurs during the inference for a given sample

  • +
  • n_batch_retries (int) – the number of times to retry the batch if an error occurs (only used if fault_tolerant_mode is True)

  • +
  • wait_time_between_retries – the number of seconds to wait before retrying the batch (only used if fault_tolerant_mode is True)

  • +
  • **kwargs – Additional keyword arguments to instantiate the MultiThreadedAPILauncher class.

  • +
+
+
+
+
+classmethod launch(flow_with_interfaces: Dict[str, Any], data: Dict | List[Dict], path_to_output_file: str | None = None) Tuple[List[dict]]
+

Class method that takes a flow and runs inference on the given data (no multithreading) and no need to instantiate the class.

+
+
Parameters:
+
    +
  • flow_with_interfaces (Dict[str, Any]) – A dictionary containing the flow to run inference with and the input and output interfaces to use.

  • +
  • data (Union[Dict, List[Dict]]) – The data to run inference on.

  • +
  • path_to_output_file (Optional[str], optional) – A path to a file to write the outputs to.

  • +
+
+
Returns:
+

A tuple containing the full outputs and the human-readable outputs.

+
+
Return type:
+

Tuple[List[dict]]

+
+
+
+
+
+predict(batch: List[dict])
+

Runs inference for the given batch (possibly in a multithreaded fashion). This method is called by the predict_dataloader +method of the MultiThreadedAPILauncher class.

+
+
Parameters:
+

batch (List[dict]) – The batch to run inference for.

+
+
Returns:
+

The batch with the inference outputs added to it.

+
+
Return type:
+

List[dict]

+
+
+
+
+
+classmethod predict_batch(flow: Flow, batch: List[dict], input_interface: Interface | None = None, output_interface: Interface | None = None, path_to_output_file: str | None = None, keys_to_write: List[str] | None = None, n_independent_samples: int = 1, fault_tolerant_mode: bool = False, n_batch_retries: int = 1, wait_time_between_retries: int = 1)
+

Class method that runs inference on the given batch for a given flow.

+
+
Parameters:
+
    +
  • flow (Flow) – The flow to run inference with.

  • +
  • batch (List[dict]) – The batch to run inference for.

  • +
  • input_interface (Optional[Interface]) – The input interface of the flow. Default: None

  • +
  • output_interface (Optional[Interface]) – The output interface of the flow. Default: None

  • +
  • path_to_output_file (Optional[str]) – A path to a file to write the outputs to. Default: None

  • +
  • keys_to_write (Optional[List[str]]) – A list of keys to write to file. Default: None

  • +
  • n_independent_samples (Optional[int]) – the number of times to independently repeat the same inference for a given sample. Default: 1

  • +
+
+
Returns:
+

The batch with the inference outputs added to it.

+
+
Return type:
+

List[dict]

+
+
+
+
+
+static predict_sample(flow: Flow, sample: Dict, input_interface: Interface | None = None, output_interface: Interface | None = None, fault_tolerant_mode: bool = False, n_batch_retries: int = 1, wait_time_between_retries: int = 1) Tuple[Dict]
+

Static method that runs inference on a single sample with a given flow.

+
+
Flow:
+

The flow to run inference with.

+
+
Sample:
+

The sample to run inference on.

+
+
Input_interface:
+

The input interface of the flow. Default: None

+
+
Output_interface:
+

The output interface of the flow. Default: None

+
+
Fault_tolerant_mode:
+

whether to crash if an error occurs during the inference for a given sample. Default: False

+
+
N_batch_retries:
+

the number of times to retry the batch if an error occurs (only used if fault_tolerant_mode is True). Default: 1

+
+
Wait_time_between_retries:
+

the number of seconds to wait before retrying the batch (only used if fault_tolerant_mode is True). Default: 1

+
+
Returns:
+

A tuple containing the output message, the output data, and the error (if any).

+
+
Return type:
+

Tuple[Dict]

+
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.flow_verse.html b/docs/built_with_sphinx/html/source/aiflows.flow_verse.html new file mode 100644 index 0000000..4177ba9 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.flow_verse.html @@ -0,0 +1,697 @@ + + + + + + + + + +aiflows.flow_verse package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.flow_verse package

+
+

Submodules

+
+
+

aiflows.flow_verse.loading module

+
+
+class aiflows.flow_verse.loading.FlowModuleSpec(repo_id: str, revision: str, commit_hash: str, cache_dir: str, sync_dir: str)
+

Bases: object

+

This class contains the flow module specification.

+
+
Parameters:
+
    +
  • repo_id (str) – The repository ID

  • +
  • revision (str) – The revision

  • +
  • commit_hash (str) – The commit hash

  • +
  • cache_dir (str) – The cache directory

  • +
  • sync_dir (str) – The sync directory

  • +
+
+
+
+
+static build_mod_id(repo_id: str, revision: str)
+

Static method that builds a module ID from a repository ID and a revision.

+
+
+
+cache_dir: str
+
+
+
+commit_hash: str
+
+
+
+property mod_id
+

Returns the module ID.

+
+
+
+repo_id: str
+
+
+
+revision: str
+
+
+
+sync_dir: str
+
+
+
+
+class aiflows.flow_verse.loading.FlowModuleSpecSummary(sync_root: str, cache_root: str, mods: List[FlowModuleSpec] | None = None)
+

Bases: object

+

This class contains the flow module specification summary.

+
+
Parameters:
+
    +
  • sync_root (str) – The sync root

  • +
  • cache_root (str) – The cache root

  • +
  • mods (List[FlowModuleSpec], optional) – The modules

  • +
+
+
+
+
+add_mod(flow_mod_spec: FlowModuleSpec)
+

Adds a FlowModuleSpec object to the FlowModuleSpecSummary object.

+
+
Parameters:
+

flow_mod_spec (FlowModuleSpec) – The FlowModuleSpec object to be added.

+
+
+
+
+
+property cache_root: str
+

Returns the remote cache root.

+
+
Returns:
+

The remote cache root.

+
+
Return type:
+

str

+
+
+
+
+
+static from_flow_mod_file(file_path: str) FlowModuleSpecSummary | None
+

Reads a flow module file and returns a FlowModuleSpecSummary object.

+
+
Parameters:
+

file_path (str) – The path to the flow module file.

+
+
Returns:
+

A FlowModuleSpecSummary object if the file exists, otherwise None.

+
+
Return type:
+

Optional[“FlowModuleSpecSummary”]

+
+
Raises:
+

ValueError – If the flow module file is invalid.

+
+
+
+
+
+get_mod(repo_id: str) FlowModuleSpec | None
+

Returns the FlowModuleSpec object for the specified repository ID.

+
+
Parameters:
+

repo_id (str) – The repository ID.

+
+
Returns:
+

The FlowModuleSpec object for the specified repository ID, or None if not found.

+
+
Return type:
+

Optional[FlowModuleSpec]

+
+
+
+
+
+get_mods() List[FlowModuleSpec]
+

Returns a list of FlowModuleSpec objects.

+
+
Returns:
+

A list of FlowModuleSpec objects.

+
+
Return type:
+

List[FlowModuleSpec]

+
+
+
+
+
+serialize() str
+

Serializes the FlowModuleSpecSummary object.

+
+
Returns:
+

The serialized FlowModuleSpecSummary object.

+
+
Return type:
+

str

+
+
+
+
+
+property sync_root: str
+

Returns the sync root.

+
+
Returns:
+

The sync root.

+
+
Return type:
+

str

+
+
+
+
+
+
+aiflows.flow_verse.loading.add_to_sys_path(path)
+

Adds a path to sys.path if it’s not already there.

+
+
Parameters:
+

path (str) – The path to add

+
+
+
+
+
+aiflows.flow_verse.loading.create_empty_flow_mod_file(sync_root: str, cache_root: str, overwrite: bool = False) str
+

Creates an empty flow module file.

+
+
Parameters:
+
    +
  • sync_root (str) – The sync root

  • +
  • cache_root (str) – The cache root

  • +
  • overwrite (bool) – Whether to overwrite the existing flow module file. Defaults to False.

  • +
+
+
Returns:
+

The path to the flow module file.

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.flow_verse.loading.create_init_py(base_dir: str)
+

Creates an __init__.py file in the given directory.

+
+
Parameters:
+

base_dir (str) – The directory to create the __init__.py file in

+
+
+
+
+
+aiflows.flow_verse.loading.extract_commit_hash_from_cache_mod_dir(cache_mod_dir: str) str
+

Extracts the commit hash from a cache directory.

+
+
Parameters:
+

cache_mod_dir (str) – The cache directory

+
+
Returns:
+

The commit hash

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.flow_verse.loading.fetch_local(repo_id: str, file_path: str, sync_dir: str) FlowModuleSpec
+

Fetches a local dependency.

+
+
Parameters:
+
    +
  • repo_id (str) – The repository ID

  • +
  • file_path (str) – The file path

  • +
  • sync_dir (str) – The sync directory

  • +
+
+
Returns:
+

The flow module specification

+
+
Return type:
+

FlowModuleSpec

+
+
+
+
+
+aiflows.flow_verse.loading.fetch_remote(repo_id: str, revision: str, sync_dir: str, cache_root: str) FlowModuleSpec
+

Fetches a remote dependency.

+
+
Parameters:
+
    +
  • repo_id (str) – The repository ID

  • +
  • revision (str) – The revision

  • +
  • sync_dir (str) – The sync directory

  • +
  • cache_root (str) – The cache root

  • +
+
+
Returns:
+

The flow module specification

+
+
Return type:
+

FlowModuleSpec

+
+
+
+
+
+aiflows.flow_verse.loading.is_local_revision(legal_revision: str) bool
+

Check if a given revision is a local revision.

+
+
Parameters:
+

legal_revision (str) – A string representing the revision to check.

+
+
Returns:
+

True if the revision is a local revision, False otherwise.

+
+
Return type:
+

bool

+
+
+
+
+
+aiflows.flow_verse.loading.is_local_sync_dir_valid(sync_dir: str)
+

Returns True if the sync_dir is a valid local sync dir, False otherwise.

+
+
Parameters:
+

sync_dir (str) – The sync directory

+
+
+
+
+
+aiflows.flow_verse.loading.is_sync_dir_modified(sync_dir: str, cache_dir: str) bool
+

Returns True if the sync_dir is modified compared to the cache_dir, False otherwise.

+
+
Parameters:
+

sync_dir (str) – The sync directory

+
+
cache_dir (str):
+

The cache directory

+
+
Returns:
+

True if the sync_dir is modified compared to the cache_dir, False otherwise

+
+
Return type:
+

bool

+
+
+
+
+ +

Removes a directory or a link.

+
+
Parameters:
+

sync_dir – The directory or link to remove

+
+
+
+
+
+aiflows.flow_verse.loading.retrive_commit_hash_from_remote(repo_id: str, revision: str) str
+

Retrieves the commit hash from a remote repository.

+
+
Parameters:
+
    +
  • repo_id (str) – The repository ID

  • +
  • revision (str) – The revision

  • +
+
+
Returns:
+

The commit hash

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.flow_verse.loading.sync_dependencies(dependencies: List[Dict[str, str]], all_overwrite: bool = False) List[str]
+

Synchronizes dependencies. (uses the _sync_dependencies function)

+
+
Parameters:
+
    +
  • dependencies (List[Dict[str, str]]) – The dependencies to synchronize

  • +
  • all_overwrite (bool) – Whether to overwrite all existing modules or not

  • +
+
+
Returns:
+

A list of sync directories

+
+
Return type:
+

List[str]

+
+
+
+
+
+aiflows.flow_verse.loading.sync_local_dep(previous_synced_flow_mod_spec: FlowModuleSpec | None, repo_id: str, mod_name: str, revision: str, caller_module_name: str, sync_root: str, overwrite: bool = False) FlowModuleSpec
+

Synchronize a local dependency.

+
+
Parameters:
+
    +
  • previous_synced_flow_mod_spec (Optional[FlowModuleSpec]) – The previously synced flow module specification.

  • +
  • repo_id (str) – The ID of the repository.

  • +
  • mod_name (str) – The name of the module.

  • +
  • revision (str) – The revision of the module.

  • +
  • caller_module_name (str) – The name of the caller module.

  • +
  • overwrite (bool) – Whether to overwrite the previously synced flow module specification. Defaults to False.

  • +
+
+
Returns:
+

The synced flow module specification.

+
+
Return type:
+

FlowModuleSpec

+
+
+
+
+
+aiflows.flow_verse.loading.sync_remote_dep(previous_synced_flow_mod_spec: FlowModuleSpec | None, repo_id: str, mod_name: str, revision: str, caller_module_name: str, sync_root: str, cache_root: str = '~/.cache/aiflows/flow_verse', overwrite: bool = False) FlowModuleSpec
+

Synchronizes a remote dependency.

+
+
Parameters:
+
    +
  • previous_synced_flow_mod_spec (Optional[FlowModuleSpec]) – The previously synced flow module specification.

  • +
  • repo_id (str) – The ID of the repository.

  • +
  • mod_name (str) – The name of the module.

  • +
  • revision (str) – The revision of the module.

  • +
  • caller_module_name (str) – The name of the caller module.

  • +
  • cache_root (str) – The root directory of the cache. Defaults to DEFAULT_CACHE_PATH.

  • +
  • overwrite (bool) – Whether to overwrite the existing module or not. Defaults to False.

  • +
+
+
Returns:
+

The synced flow module specification.

+
+
Return type:
+

FlowModuleSpec

+
+
+
+
+
+aiflows.flow_verse.loading.validate_and_augment_dependency(dependency: Dict[str, str], caller_module_name: str) bool
+

Validates and augments a dependency dictionary.

+
+
Parameters:
+
    +
  • dependency (Dict[str, str]) – A dictionary containing information about the dependency.

  • +
  • caller_module_name (str) – The name of the calling module.

  • +
+
+
Returns:
+

True if the dependency is local, False otherwise.

+
+
Return type:
+

bool

+
+
+
+
+
+aiflows.flow_verse.loading.write_flow_mod_summary(flow_mod_summary_path: str, flow_mod_summary: FlowModuleSpecSummary)
+

Writes a flow module summary to a file.

+
+
Parameters:
+
    +
  • flow_mod_summary_path (str) – The path to the flow module summary file.

  • +
  • flow_mod_summary (FlowModuleSpecSummary) – The flow module summary.

  • +
+
+
+
+
+
+aiflows.flow_verse.loading.write_or_append_gitignore(sync_dir: str, mode: str, content: str)
+

Writes or appends a .gitignore file to the given directory.

+
+
Parameters:
+
    +
  • sync_dir (str) – The directory to write the .gitignore file to

  • +
  • mode (str) – The mode to open the file with

  • +
  • content (str) – The content to write to the file

  • +
+
+
+
+
+
+

aiflows.flow_verse.utils module

+
+
+aiflows.flow_verse.utils.build_hf_cache_path(repo_id: str, commit_hash: str, cache_root: str) str
+

Builds the path to the cache directory for a given Hugging Face model. +The path is constructed as follows: +{CACHE_ROOT}/models--{username}--{modelname}/snapshots/{commit_hash}

+
+
Parameters:
+
    +
  • repo_id (str) – The repository ID in the format of “username/modelname”.

  • +
  • commit_hash (str) – The commit hash of the model snapshot.

  • +
  • cache_root (str) – The root directory of the cache.

  • +
+
+
Returns:
+

The path to the cache directory for the given model snapshot.

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.flow_verse.utils.is_local_revision(revision: str)
+

Returns True if the revision is a local revision, False otherwise.

+
+
Parameters:
+

revision (str) – The revision to check

+
+
Returns:
+

True if the revision is a local revision, False otherwise

+
+
Return type:
+

bool

+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.history.html b/docs/built_with_sphinx/html/source/aiflows.history.html new file mode 100644 index 0000000..0474f8e --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.history.html @@ -0,0 +1,241 @@ + + + + + + + + + +aiflows.history package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.history package

+
+

Submodules

+
+
+

aiflows.history.flow_history module

+
+
+class aiflows.history.flow_history.FlowHistory
+

Bases: object

+

Represents a history of messages.

+
+
Attributes:

messages (List[Message]): A list of the messages comprising the history of a flow.

+
+
+
+
+add_message(message: Message) None
+

Adds a message to the history.

+
+
Parameters:
+

message (Message) – The message to add.

+
+
+
+
+
+get_last_n_messages(n: int) List[Message]
+

Returns a list representation of the last n messages in the history.

+
+
Parameters:
+

n (int) – The number of messages to return.

+
+
Returns:
+

The list representation of the last n messages in the history.

+
+
Return type:
+

List[Message]

+
+
+
+
+
+to_list() List[Dict]
+

Returns a list representation of the history.

+
+
Returns:
+

The list representation of the history.

+
+
Return type:
+

List[Dict]

+
+
+
+
+
+to_string() str
+

Returns a string representation of the history.

+
+
Returns:
+

The string representation of the history.

+
+
Return type:
+

str

+
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.html b/docs/built_with_sphinx/html/source/aiflows.html new file mode 100644 index 0000000..19079a2 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.html @@ -0,0 +1,654 @@ + + + + + + + + + +aiflows package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows package

+
+

Subpackages

+
+ +
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.interfaces.html b/docs/built_with_sphinx/html/source/aiflows.interfaces.html new file mode 100644 index 0000000..fa73a8f --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.interfaces.html @@ -0,0 +1,204 @@ + + + + + + + + + +aiflows.interfaces package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.interfaces package

+
+

Submodules

+
+
+

aiflows.interfaces.abstract module

+
+
+class aiflows.interfaces.abstract.Interface
+

Bases: ABC

+

This class is the base class for all interfaces.

+
+
+
+

aiflows.interfaces.key_interface module

+
+
+class aiflows.interfaces.key_interface.KeyInterface(keys_to_rename: Dict[str, str] = {}, keys_to_copy: Dict[str, str] = {}, keys_to_set: Dict[str, Any] = {}, additional_transformations: List = [], keys_to_select: List[str] = [], keys_to_delete: List[str] = [])
+

Bases: ABC

+

This class is the base class for all key interfaces. It applies a list of transformations to a data dictionary.

+
+
Parameters:
+
    +
  • keys_to_rename (Dict[str, str], optional) – A dictionary mapping old keys to new keys (used to instantiate the transformation defined in the KeyRename class)

  • +
  • keys_to_copy (Dict[str, str], optional) – A dictionary mapping old keys to new keys (used to instantiate the transformation defined in the KeyCopy class)

  • +
  • keys_to_set (Dict[str, str], optional) – A dictionary mapping keys to values (used to instantiate the transformation defined in the KeySet class)

  • +
  • additional_transformations (List, optional) – A list of additional transformations to apply to the data dictionary

  • +
  • keys_to_select (List[str], optional) – A list of keys to select (used to instantiate the transformation defined in the KeySelect class)

  • +
  • keys_to_delete (List[str], optional) – A list of keys to delete (used to instantiate the transformation defined in the KeyDelete class)

  • +
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.messages.html b/docs/built_with_sphinx/html/source/aiflows.messages.html new file mode 100644 index 0000000..c86dbce --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.messages.html @@ -0,0 +1,641 @@ + + + + + + + + + +aiflows.messages package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.messages package

+
+

Submodules

+
+
+

aiflows.messages.abstract module

+
+
+class aiflows.messages.abstract.Message(data: Dict[str, Any], created_by: str, private_keys: List[str] | None = None)
+

Bases: object

+

This class represents a message that is passed between nodes in a flow.

+
+
Parameters:
+
    +
  • data (Dict[str, Any]) – The data content of the message

  • +
  • created_by (str) – The name of the flow that created the message

  • +
  • private_keys (List[str], optional) – A list of private keys that should not be serialized or logged

  • +
+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_dict()
+

Returns a dictionary representation of the message that can be serialized to JSON

+
+
+
+to_string()
+

Returns a formatted string representation of the message that will be logged to the console

+
+
+
+
+

aiflows.messages.flow_message module

+
+
+class aiflows.messages.flow_message.InputMessage(data_dict: Dict[str, Any], src_flow: str, dst_flow: str, created_by: str | None = None, private_keys: List[str] | None = None)
+

Bases: Message

+

This class represents an input message that is passed from one flow to another.

+
+
Parameters:
+
    +
  • data_dict (Dict[str, Any]) – The data content of the message

  • +
  • src_flow (str) – The name of the flow that created the message

  • +
  • dst_flow (str) – The name of the flow that should receive the message

  • +
  • created_by (str) – The name of the flow that created the message

  • +
  • private_keys (List[str], optional) – A list of private keys that should not be serialized or logged

  • +
+
+
+
+
+static build(data_dict: Dict[str, Any], src_flow: str, dst_flow: str, private_keys: List[str] | None = None, created_by: str | None = None) InputMessage
+

Static method that builds an InputMessage object.

+
+
Parameters:
+
    +
  • data_dict (Dict[str, Any]) – The data content of the message

  • +
  • src_flow (str) – The name of the flow that created the message

  • +
  • dst_flow (str) – The name of the flow that should receive the message

  • +
  • created_by (str) – The name of the flow that created the message

  • +
  • private_keys (List[str], optional) – A list of private keys that should not be serialized or logged

  • +
+
+
Returns:
+

The built InputMessage object

+
+
Return type:
+

InputMessage

+
+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+class aiflows.messages.flow_message.OutputMessage(src_flow: str, dst_flow: str, output_data: Dict[str, Any], raw_response: Dict[str, Any] | None, input_message_id: str, history: FlowHistory, created_by: str, **kwargs)
+

Bases: Message

+

This class represents an output message that is passed from one flow to another.

+
+
Parameters:
+
    +
  • src_flow (str) – The name of the flow that created the message

  • +
  • dst_flow (str) – The name of the flow that should receive the message

  • +
  • output_data (Dict[str, Any]) – The data content of the message

  • +
  • raw_response (Dict[str, Any]) – The raw response of the message

  • +
  • input_message_id (str) – The unique identification of the input message

  • +
  • history (FlowHistory) – The history of the flow

  • +
  • created_by (str) – The name of the flow that created the message

  • +
  • **kwargs – arguments that are passed to the Message constructor

  • +
+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+get_output_data()
+

Returns the output data of the message.

+
+
Returns:
+

The output data of the message.

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+class aiflows.messages.flow_message.UpdateMessage_ChatMessage(content: str, role: str, updated_flow: str, **kwargs)
+

Bases: UpdateMessage_Generic

+

Updates the chat message of a flow.

+
+
Parameters:
+
    +
  • content (str) – The content of the chat message

  • +
  • role (str) – The role of the chat message (typically “user”, “assistant”, “system”, “human” …)

  • +
  • updated_flow (str) – The name of the flow that should be updated

  • +
  • **kwargs – arguments that are passed to the UpdateMessage_Generic constructor

  • +
+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+class aiflows.messages.flow_message.UpdateMessage_FullReset(updated_flow: str, created_by: str, keys_deleted_from_namespace: List[str])
+

Bases: Message

+

Resets the full message of a flow.

+
+
Parameters:
+

updated_flow – The name of the flow that should be updated

+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+class aiflows.messages.flow_message.UpdateMessage_Generic(updated_flow: str, **kwargs)
+

Bases: Message

+

Updates the message of a flow.

+
+
Parameters:
+
    +
  • updated_flow (str) – The name of the flow that should be updated

  • +
  • **kwargs – arguments that are passed to the Message constructor

  • +
+
+
+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+class aiflows.messages.flow_message.UpdateMessage_NamespaceReset(updated_flow: str, created_by: str, keys_deleted_from_namespace: List[str])
+

Bases: Message

+

Resets the namespace of a flow’s message.

+
+
+created_at: str
+
+
+
+created_by: str
+
+
+
+data: Dict[str, Any]
+
+
+
+message_id: str
+
+
+
+message_type: str
+
+
+
+private_keys: List[str]
+
+
+
+to_string()
+

Returns a string representation of the message.

+
+
Returns:
+

The string representation of the message.

+
+
Return type:
+

str

+
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.prompt_template.html b/docs/built_with_sphinx/html/source/aiflows.prompt_template.html new file mode 100644 index 0000000..7971d22 --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.prompt_template.html @@ -0,0 +1,240 @@ + + + + + + + + + +aiflows.prompt_template package | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.prompt_template package

+
+

Submodules

+
+
+

aiflows.prompt_template.jinja2_prompts module

+
+
+class aiflows.prompt_template.jinja2_prompts.JinjaPrompt(**kwargs)
+

Bases: object

+

This class can be used to generate prompts from jinja templates

+
+
Parameters:
+

**kwargs – See below:

+
+
Keyword Arguments:
+
    +
  • +
    input_variables (List[str]) –

    A list of variables that are required to render the template

    +
    +
    +
  • +
  • +
    partial_variables (Dict[str, Any]) –

A dictionary of variables and their values that are required to render the template (useful when some variable values are known before others)

    +
    +
    +
  • +
  • +
    template (str) –

    The jinja template to render

    +
    +
    +
  • +
+
+
+
+
+format(**kwargs)
+

format the template with the given input variables

+
+
Parameters:
+

**kwargs – The input variables to render the template (should be a subset of the input variables)

+
+
Returns:
+

The rendered template

+
+
Return type:
+

str

+
+
+
+
+
+partial(**kwargs)
+

Returns a new JinjaPrompt object, given some input variables (moves the given input variables from the input variables to the partial variables) +This method is useful when some variable values become available before others

+
+
Parameters:
+

**kwargs – The input variables to render the template (should be a subset of the input variables)

+
+
Returns:
+

A new JinjaPrompt object

+
+
Return type:
+

JinjaPrompt

+
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/aiflows.utils.html b/docs/built_with_sphinx/html/source/aiflows.utils.html new file mode 100644 index 0000000..cba917e --- /dev/null +++ b/docs/built_with_sphinx/html/source/aiflows.utils.html @@ -0,0 +1,904 @@ + + + + + + + + + +aiflows.utils package | aiFlows documentation + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows.utils package

+
+

Submodules

+
+
+

aiflows.utils.general_helpers module

+
+
+aiflows.utils.general_helpers.create_unique_id(existing_ids: List[str] | None = None)
+

creates a unique id

+
+
Parameters:
+

existing_ids (List[str], optional) – A list of existing ids to check against, defaults to None

+
+
Returns:
+

A unique id

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.utils.general_helpers.encode_from_buffer(buffer)
+

Encodes a buffer (typically an image from a video) to base64.

+
+
+
+aiflows.utils.general_helpers.encode_image(image_path)
+

Encodes an image to base64.

+
+
+
+aiflows.utils.general_helpers.exception_handler(e)
+

Handles an exception.

+
+
Parameters:
+

e (Exception) – The exception to handle

+
+
+
+
+
+aiflows.utils.general_helpers.extract_top_level_function_names(python_file_path)
+

Extracts the top level function names from a python file (ignores nested)

+
+
Parameters:
+

python_file_path (str) – The path to the python file

+
+
Returns:
+

A list of function names

+
+
Return type:
+

List[str]

+
+
+
+
+
+aiflows.utils.general_helpers.find_replace_in_dict(cfg, key_to_find, new_value, current_path='')
+

Recursively searches for keys == key_to_find in a dictionary and replaces its value with new_value. +note1: it replaces every key == key_to_find, whether or not it is nested in the dictionary. +note2: we recommend using this function only in the Quick Start tutorial, and not in production code.

+
+
Parameters:
+
    +
  • cfg (Dict[str, Any]) – The dictionary to search in

  • +
  • key_to_find (str) – The key to find

  • +
  • new_value (Any) – The new value to set

  • +
  • current_path (str, optional) – The current path, defaults to “”

  • +
+
+
Returns:
+

The updated dictionary

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+aiflows.utils.general_helpers.flatten_dict(d, parent_key='', sep='.')
+

Flattens a dictionary.

+
+
Parameters:
+
    +
  • d (Dict[str, Any]) – The dictionary to flatten

  • +
  • parent_key (str, optional) – The parent key to use, defaults to ‘’

  • +
  • sep (str, optional) – The separator to use, defaults to ‘.’

  • +
+
+
Returns:
+

The flattened dictionary

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+aiflows.utils.general_helpers.get_current_datetime_ns()
+

Returns the current datetime in nanoseconds.

+
+
Returns:
+

The current datetime in nanoseconds

+
+
Return type:
+

int

+
+
+
+
+
+aiflows.utils.general_helpers.get_function_from_name(function_name, module)
+

Returns a function from a module given its name.

+
+
+
+aiflows.utils.general_helpers.get_predictions_dir_path(output_dir, create_if_not_exists=True)
+

Returns the path to the predictions folder.

+
+
Parameters:
+
    +
  • output_dir (str) – The output directory

  • +
  • create_if_not_exists (bool, optional) – Whether to create the folder if it does not exist, defaults to True

  • +
+
+
Returns:
+

The path to the predictions folder

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.utils.general_helpers.log_suggest_help()
+

Logs a message suggesting to get help or provide feedback on github.

+
+
+
+aiflows.utils.general_helpers.nested_keys_pop(data_dict: dict, nested_key: str) Any
+

Pop a nested key in a dictionary.

+
+
Parameters:
+
    +
  • data_dict (dict) – The dictionary to pop from.

  • +
  • nested_key (str) – The nested key to pop, in the format “key1.key2.key3”.

  • +
+
+
Returns:
+

The value of the popped key.

+
+
Return type:
+

Any

+
+
+
+
+ +

Searches for a nested key in a dictionary using a composite key string.

+
+
Parameters:
+
    +
  • search_dict (dict) – The dictionary to search in.

  • +
  • nested_key (str) – The composite key string to search for.

  • +
+
+
Returns:
+

A tuple containing the value of the nested key and a boolean indicating if the key was found.

+
+
Return type:
+

Tuple[Any, bool]

+
+
+
+
+
+aiflows.utils.general_helpers.nested_keys_update(data_dict: dict, nested_key: str, value: Any) None
+

Update the value of a nested key in a dictionary.

+
+
Parameters:
+
    +
  • data_dict (dict) – The dictionary to update.

  • +
  • nested_key (str) – The nested key to update, in the format “key1.key2.key3”.

  • +
  • value (Any) – The new value to set for the nested key.

  • +
+
+
+
+
+
+aiflows.utils.general_helpers.process_config_leafs(config: Dict | List, leaf_processor: Callable[[Tuple[Any, Any]], Any])
+

Processes the leafs of a config dictionary or list.

+
+
Parameters:
+
    +
  • config (Union[Dict, List]) – The config to process

  • +
  • leaf_processor (Callable[[Tuple[Any, Any]], Any]) – The leaf processor to use

  • +
+
+
+
+
+
+aiflows.utils.general_helpers.python_file_path_to_module_path(file_path)
+

Converts a python file path to a python module path

+
+
Parameters:
+

file_path (str) – The python file path

+
+
Returns:
+

The python module path

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.utils.general_helpers.python_module_path_to_file_path(module_path)
+

Converts a python module path to a python file path

+
+
Parameters:
+

module_path (str) – The python module path

+
+
Returns:
+

The python file path

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.utils.general_helpers.read_gzipped_jsonlines(path_to_file)
+

Reads a gzipped jsonlines file and returns a list of dictionaries.

+
+
Parameters:
+

path_to_file (str) – The path to the gzipped jsonlines file

+
+
Returns:
+

A list of dictionaries

+
+
Return type:
+

List[Dict[str, Any]]

+
+
+
+
+
+aiflows.utils.general_helpers.read_jsonlines(path_to_file)
+

Reads a jsonlines file and returns a list of dictionaries.

+
+
Parameters:
+

path_to_file (str) – The path to the jsonlines file

+
+
Returns:
+

A list of dictionaries

+
+
Return type:
+

List[Dict[str, Any]]

+
+
+
+
+
+aiflows.utils.general_helpers.read_outputs(outputs_dir)
+

Reads the outputs from a jsonlines file.

+
+
Parameters:
+

outputs_dir (str) – The directory containing the output files

+
+
Returns:
+

The outputs

+
+
Return type:
+

List[Dict[str, Any]]

+
+
+
+
+
+aiflows.utils.general_helpers.read_yaml_file(path_to_file, resolve=True)
+

Reads a yaml file.

+
+
Parameters:
+
    +
  • path_to_file (str) – The path to the yaml file

  • +
  • resolve (bool, optional) – Whether to resolve the config, defaults to True

  • +
+
+
Returns:
+

The config

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+aiflows.utils.general_helpers.recursive_dictionary_update(d, u)
+

Performs a recursive update of the values in dictionary d with the values of dictionary u

+
+
Parameters:
+
    +
  • d (Dict[str, Any]) – The dictionary to update

  • +
  • u (Dict[str, Any]) – The dictionary to update with

  • +
+
+
Returns:
+

The updated dictionary

+
+
+
+
+
+aiflows.utils.general_helpers.try_except_decorator(f)
+

A decorator that wraps the passed in function in order to handle exceptions and log a message suggesting to get help or provide feedback on github.

+
+
+
+aiflows.utils.general_helpers.unflatten_dict(d, sep='.')
+

Unflattens a dictionary.

+
+
Parameters:
+
    +
  • d (Dict[str, Any]) – The dictionary to unflatten

  • +
  • sep (str, optional) – The separator to use, defaults to ‘.’

  • +
+
+
Returns:
+

The unflattened dictionary

+
+
Return type:
+

Dict[str, Any]

+
+
+
+
+
+aiflows.utils.general_helpers.validate_flow_config(cls, flow_config)
+

Validates the flow config.

+
+
Parameters:
+
    +
  • cls (class) – The class to validate the flow config for

  • +
  • flow_config (Dict[str, Any]) – The flow config to validate

  • +
+
+
Raises:
+

ValueError – If the flow config is invalid

+
+
+
+
+
+aiflows.utils.general_helpers.write_gzipped_jsonlines(path_to_file, data, mode='w')
+

Writes a list of dictionaries to a gzipped jsonlines file.

+
+
Parameters:
+
    +
  • path_to_file (str) – The path to the gzipped jsonlines file

  • +
  • data (List[Dict[str, Any]]) – The data to write

  • +
  • mode (str, optional) – The mode to use, defaults to “w”

  • +
+
+
+
+
+
+aiflows.utils.general_helpers.write_jsonlines(path_to_file, data, mode='w')
+

Writes a list of dictionaries to a jsonlines file.

+
+
Parameters:
+
    +
  • path_to_file (str) – The path to the jsonlines file

  • +
  • data (List[Dict[str, Any]]) – The data to write

  • +
  • mode (str, optional) – The mode to use, defaults to “w”

  • +
+
+
+
+
+
+aiflows.utils.general_helpers.write_outputs(path_to_output_file, summary, mode)
+

Writes the summary to a jsonlines file.

+
+
Parameters:
+
    +
  • path_to_output_file (str) – The path to the output file

  • +
  • summary (List[Dict[str, Any]]) – The summary to write

  • +
  • mode (str) – The mode to use

  • +
+
+
+
+
+
+

aiflows.utils.io_utils module

+
+
+aiflows.utils.io_utils.load_pickle(pickle_path: str)
+

Loads data from a pickle file.

+
+
Parameters:
+

pickle_path (str) – The path to the pickle file

+
+
Returns:
+

The data loaded from the pickle file

+
+
Return type:
+

Any

+
+
+
+
+
+aiflows.utils.io_utils.recursive_json_serialize(obj)
+

Recursively serializes an object to json.

+
+
Parameters:
+

obj (Any) – The object to serialize

+
+
Returns:
+

The serialized object

+
+
Return type:
+

Any

+
+
+
+
+
+

aiflows.utils.logging module

+

Logging utilities.

+
+
+aiflows.utils.logging.add_handler(handler: Handler) None
+

adds a handler to the Flows’s root logger.

+
+
+
+aiflows.utils.logging.auto_set_dir(action=None, name=None)
+

Use logger.set_logger_dir() to set log directory to +“./.aiflows/logs/{scriptname}:{name}”. “scriptname” is the name of the main python file currently running

+
+
Parameters:
+
    +
  • action (str, optional) – an action of [“k”,”d”,”q”] to be performed when the directory exists. +When the directory exists, Will ask user by default. +-“d”: delete the directory. Note that the deletion may fail when +the directory is used by tensorboard. +-“k”: keep the directory. This is useful when you resume from a +previous training and want the directory to look as if the +training was not interrupted. +Note that this option does not load old models or any other +old states for you. It simply does nothing.

  • +
  • name (str, optional) – The name of the directory

  • +
+
+
+
+
+
+aiflows.utils.logging.disable_default_handler() None
+

Disable the default handler of the Flows’s root logger.

+
+
+
+aiflows.utils.logging.disable_propagation() None
+

Disable propagation of the library log outputs. Note that log propagation is disabled by default.

+
+
+
+aiflows.utils.logging.enable_default_handler() None
+

Enable the default handler of the Flows’s root logger.

+
+
+
+aiflows.utils.logging.enable_explicit_format() None
+

Enable explicit formatting for every Flows’s logger. The explicit formatter is as follows:

+
[LEVELNAME|FILENAME|LINE NUMBER] TIME >> MESSAGE
+
+
+

All handlers currently bound to the root logger are affected by this method.

+
+
+
+aiflows.utils.logging.enable_propagation() None
+

Enable propagation of the library log outputs. Please disable the Flows’s default handler to +prevent double logging if the root logger has been configured.

+
+
+
+aiflows.utils.logging.get_log_levels_dict()
+

Return a dictionary of all available log levels.

+
+
+
+aiflows.utils.logging.get_logger(name: str | None = None) Logger
+

Return a logger with the specified name. +This function is not supposed to be directly accessed unless you are writing a custom aiflows module.

+
+
Parameters:
+

name (str, optional) – The name of the logger to return

+
+
Returns:
+

The logger

+
+
+
+
+
+aiflows.utils.logging.get_logger_dir()
+
+
Returns:
+

The logger directory, or None if not set. +The directory is used for general logging, tensorboard events, checkpoints, etc.

+
+
Return type:
+

str

+
+
+
+
+
+aiflows.utils.logging.get_verbosity() int
+

Return the current level for the Flows’s root logger as an int.

+
+
Returns:
+

The logging level

+
+
Return type:
+

int

+
+
+
+

Note

+

Flows has following logging levels:

+
    +
  • 50: aiflows.logging.CRITICAL or aiflows.logging.FATAL

  • +
  • 40: aiflows.logging.ERROR

  • +
  • 30: aiflows.logging.WARNING or aiflows.logging.WARN

  • +
  • 20: aiflows.logging.INFO

  • +
  • 10: aiflows.logging.DEBUG

  • +
+
+
+
+
+aiflows.utils.logging.remove_handler(handler: Handler) None
+

Removes the given handler from the Flows’s root logger.

+
+
+
+aiflows.utils.logging.reset_format() None
+

Resets the formatting for Flows’s loggers. +All handlers currently bound to the root logger are affected by this method.

+
+
+
+aiflows.utils.logging.set_dir(dirname, action=None)
+

Set the directory for global logging. +:param dirname: log directory +:type dirname: str +:param action: an action of [“k”,”d”,”q”] to be performed when the directory exists. +When the directory exists, it will ask the user by default. +- “d”: delete the directory. Note that the deletion may fail when +the directory is used by tensorboard. +- “k”: keep the directory. This is useful when you resume from a +previous training and want the directory to look as if the +training was not interrupted. +Note that this option does not load old models or any other +old states for you. It simply does nothing.

+
+
Parameters:
+
    +
  • dirname (str) – log directory

  • +
  • action (str, optional) – an action of [“k”,”d”,”q”] to be performed when the directory exists. +When the directory exists, it will ask the user by default. +- “d”: delete the directory. Note that the deletion may fail when +the directory is used by tensorboard. +- “k”: keep the directory. This is useful when you resume from a +previous training and want the directory to look as if the +training was not interrupted. +Note that this option does not load old models or any other +old states for you. It simply does nothing.

  • +
+
+
+
+
+
+aiflows.utils.logging.set_verbosity(verbosity: int) None
+

Set the verbosity level for the Flows’s root logger.

+
+
Parameters:
+

verbosity (int) – Logging level. For example, it can be one of the following: +- aiflows.logging.CRITICAL or aiflows.logging.FATAL +- aiflows.logging.ERROR +- aiflows.logging.WARNING or aiflows.logging.WARN +- aiflows.logging.INFO +- aiflows.logging.DEBUG

+
+
+
+
+
+aiflows.utils.logging.set_verbosity_debug()
+

Set the verbosity to the DEBUG level.

+
+
+
+aiflows.utils.logging.set_verbosity_error()
+

Set the verbosity to the ERROR level.

+
+
+
+aiflows.utils.logging.set_verbosity_info()
+

Set the verbosity to the INFO level.

+
+
+
+aiflows.utils.logging.set_verbosity_warning()
+

Set the verbosity to the WARNING level.

+
+
+
+aiflows.utils.logging.warning_advice(self, *args, **kwargs)
+

This method is identical to logger.warning(), but if env var FLOWS_NO_ADVISORY_WARNINGS=1 is set, this +warning will not be printed

+
+
Parameters:
+
    +
  • self – The logger object

  • +
  • *args – The arguments to pass to the warning method

  • +
  • **kwargs – The keyword arguments to pass to the warning method

  • +
+
+
+
+
+
+aiflows.utils.logging.warning_once(self, *args, **kwargs)
+

This method is identical to logger.warning(), but will emit the warning with the same message only once

+
+

Note

+

The cache is for the function arguments, so 2 different callers using the same arguments will hit the cache. +The assumption here is that all warning messages are unique across the code. If they aren’t then need to switch to +another type of cache that includes the caller frame information in the hashing function.

+
+
+
+
+

aiflows.utils.rich_utils module

+
+
+aiflows.utils.rich_utils.print_config_tree(cfg: DictConfig, print_order: Sequence[str] = [], resolve: bool = False, save_to_file: bool = False) None
+

Prints the content of a DictConfig using the Rich library and its tree structure.

+
+
Parameters:
+
    +
  • cfg (DictConfig) – Configuration composed by Hydra.

  • +
  • print_order (Sequence[str], optional) – Determines in what order config components are printed, defaults to []

  • +
  • resolve (bool, optional) – Whether to resolve reference fields of DictConfig, defaults to False

  • +
+
+
+
+
+
+

Module contents

+
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/built_with_sphinx/html/source/modules.html b/docs/built_with_sphinx/html/source/modules.html new file mode 100644 index 0000000..c3ef021 --- /dev/null +++ b/docs/built_with_sphinx/html/source/modules.html @@ -0,0 +1,260 @@ + + + + + + + + + +aiflows | aiFlows documentation + + + + + + + + + + + + + +
+ Skip to content +
+ +
+
+
+ +
+
+
+
+
+
+ +
+
+

aiflows

+
+ +
+
+
+
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/citation/index.md b/docs/citation/index.md new file mode 100644 index 0000000..7eea4a1 --- /dev/null +++ b/docs/citation/index.md @@ -0,0 +1,15 @@ +## Citation + + +To reference the 🤖🌊 **aiFlows** library, for now, please cite the paper [Flows: Building Blocks of Reasoning and Collaborating AI](https://arxiv.org/pdf/2308.01285.pdf): + +``` +@misc{josifoski2023flows, + title={Flows: Building Blocks of Reasoning and Collaborating AI}, + author={Martin Josifoski and Lars Klein and Maxime Peyrard and Yifei Li and Saibo Geng and Julian Paul Schnitzler and Yuxing Yao and Jiheng Wei and Debjit Paul and Robert West}, + year={2023}, + eprint={2308.01285}, + archivePrefix={arXiv}, + primaryClass={cs.AI} +} +``` \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..9a80d74 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,39 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information +import os +import sys +import os +import sys + +sys.path.insert(0, os.path.abspath("../../../")) + +project = "aiFlows" +copyright = "2023" +author = "aiFlow Team" + + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_copybutton", "sphinx.ext.autodoc", "myst_parser"] + + +# extensions = ['autoapi.extension'] +# autoapi_dirs = ['./../../../flows'] + + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# 
https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "sphinxawesome_theme" +html_static_path = ["_static"] +html_favicon = "../assets/flows_logo_round.png" diff --git a/docs/contributing_info/automating_documentation_flow.rst b/docs/contributing_info/automating_documentation_flow.rst new file mode 100644 index 0000000..6f8167f --- /dev/null +++ b/docs/contributing_info/automating_documentation_flow.rst @@ -0,0 +1,49 @@ +.. _automating_doc: + +=========================================================== +Automating the documentation of a Flow on the FlowVerse +=========================================================== + +Documenting your Flow is a crucial step in ensuring clarity and accessibility. Let's explore an efficient way to automate this process using pydoc-markdown. + +**1. Document Your Flow in Sphinx Format** +------------------------------------------- +Start by documenting your Flow in `Sphinx format`_. Need a reference? Check out `ChatFlowModule`_ for inspiration. + + Pro tip: Leverage VSCode's GitHub Copilot to expedite the documentation process. + +**2. Install pydoc-markdown** +------------------------------- +Ensure you have the necessary tool installed by running the following command:: + + pip install pydoc-markdown + + +**3. Navigate to Your Flow Directory** +------------------------------------------ +Go to the directory containing your Flow file:: + + cd + + +**4. Build the Markdown** +------------------------------------------ +Generate the Markdown documentation using pydoc-markdown. Replace with the name of your Flow file (excluding the `.py` extension). +For example, if your Flow file is named `Flow1.py`, execute the following command:: + + + pydoc-markdown -p Flow1 --render-toc > README.md + + +If you have multiple Flow files, consider using the following command to include all files in the documentation:: + + + pydoc-markdown -I . 
--render-toc > README.md + + +------ + +This process automates the generation of Markdown documentation for your Flow, streamlining the contribution process on the FlowVerse. Happy documenting! 🚀✨ + +.. _Sphinx format: https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html +.. _ChatFlowModule: https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.py diff --git a/docs/contributing_info/coding_standards.rst b/docs/contributing_info/coding_standards.rst new file mode 100644 index 0000000..33fd02a --- /dev/null +++ b/docs/contributing_info/coding_standards.rst @@ -0,0 +1,69 @@ +.. _coding_standards: + +Coding Standards +================ + +When contributing to aiFlows library, it's essential to adhere to the following coding standards to maintain consistency, readability, and the overall quality of the codebase: + +1. Simplicity and Readability +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Strive to make your code as simple and readable as possible. Use clear and meaningful variable/function names, and avoid unnecessary complexity. + +2. Best Practices +^^^^^^^^^^^^^^^^^^^^^^ + +Follow industry best practices when implementing features or fixing bugs. This includes adhering to language-specific conventions and guidelines. + +3. Documentation +^^^^^^^^^^^^^^^^^^^^^^^^ + +Document your code thoroughly. Provide comments where necessary to explain complex logic or algorithms. Use clear and concise language to describe your thought process. + +4. Docstrings in Sphinx Format +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For all new functions and classes, include docstrings in Sphinx format. These docstrings should describe the purpose, parameters, return values, and possibly exceptions raised by the function or class. Here is an example of the docstring of a function in the Sphinx format:: + + def example_function(param1, param2): + """ + Brief description of the function. + + :param param1: Description of the first parameter. 
+ :param param2: Description of the second parameter. + :return: Description of the return value. + :raises CustomException: Description of when this exception is raised. + """ + # Function implementation + return result + +For more details on the Sphinx docstring format check out this link: `Sphinx Docstring Format`_. + +5. Backward Compatibility +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Ensure that your code changes are backward compatible whenever possible. This helps maintain the stability of the library for existing users. + +6. Thorough Testing +^^^^^^^^^^^^^^^^^^^^ + +Create comprehensive tests for your code. Tests should cover various scenarios, including edge cases, to ensure the robustness of your implementation. + +7. Test Coverage +^^^^^^^^^^^^^^^^ + +Try to maintain or increase test coverage when adding new features or modifying existing ones when needed. Aim for a high percentage of code coverage to catch potential issues early. + +8. Feature Tests +^^^^^^^^^^^^^^^^ + +When introducing new features, include corresponding tests. Every feature should have a test, and existing tests should be updated as needed. + + +--------------- + +Your dedication to simplicity, readability, and best practices is greatly appreciated. Your contributions help make the aiFlows library more accessible, robust, and user-friendly for the entire community. + +Once again, thank you for being a valued member of our community and for your commitment to making aiFlows even better. Happy coding! 🚀⭐ + + +.. _Sphinx Docstring Format: https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html \ No newline at end of file diff --git a/docs/contributing_info/contribute_index.rst b/docs/contributing_info/contribute_index.rst new file mode 100644 index 0000000..66a3c5a --- /dev/null +++ b/docs/contributing_info/contribute_index.rst @@ -0,0 +1,81 @@ +.. 
_contributing_index: + +Contribution Guide +========================================= + +This guide provides information on how to contribute to the aiFlows. Whether you're interested in coding, documentation, collaboration, fixing bugs or adding features you'll find useful resources here. +If you know what you're looking for, use the table of contents to jump right in. If you're new to aiFlows, start with the **Preface** section below. + +Table of Contents +----------------- +.. toctree:: + :titlesonly: + :glob: + + * + +**Preface** +---------------------------------------------- + Our goal is to make Flows a community-driven project that will benefit researchers and developers alike (see the `Why should I use aiFlows?`_ ) and to achieve this goal, we need your help. + + + You can become a part of the project in a few ways: + + - contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it + - contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others' work + - use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions, as well as important missing features, is by experimenting + - last but not least, star the repository and shout out aiFlows with your friends and colleagues; spread the word with love + + .. _ + + We will support the community in the best way we can but also lead by example. In the coming weeks, we will share: + + - a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. -- feedback and help would be greatly appreciated!) 
+ - write-ups outlining features, ideas, and our long-term vision for Flows -- we encourage you to pick up any of these and start working on them in whatever way you see fit + - a version of JARVIS -- your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us, and JARVIS will become one of those useful projects that will constantly push the boundaries of what's possible with Flows + + .. _ + + We have tried to find a way for anyone to benefit by contributing to the project. Below we describe the envisioned workflows in more detail (we would love to hear your feedback on this -- the Discord `server `_ already has a channel for it :)). + + In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let's work on a great (open-source) AI future together! + + + + +**Want To Contribute to aiFlows?** +---------------------------------------------- + +Connecting With Like-Minded Contributors & How To Get Help ? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Explore the :ref:`finding_collaborators` section for resources, tips, and guidance on connecting with potential collaborators, sharing project ideas, building your dream team or how to get help. 🚀🌟 + + +Contributing To aiFlows Library: Bug Fixes and Feature Additions Guide +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Contribute to the aiFlows Library! Follow the guidelines in the :ref:`contributing_to_ai_flows` guide for bug fixes and feature additions. + Report issues on GitHub, discuss on Discord, and create pull requests. Your contributions matter! 🚀🌟 + + +Contributing To the FlowVerse: Creating New Flows and Contributing To Existing Flows Guide +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Contribute to the Flows from the FlowVerse! 
Follow the guidelines in the :ref:`contributing_to_FlowVerse` guide to understand how to create and publish your Flow or contribute to an existing one. 🚀 + +Automating the Generation of FlowCards (README) for the FlowVerse +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Check out the :ref:`automating_doc` guide to speed up the process of creating FlowCards (READMEs) for the FlowVerse. + +Coding Standards for aiFlows +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Review the coding standards for aiFlows Library contributions in the :ref:`coding_standards` guide. Essential guidelines ensuring a high-quality codebase. + Familiarize yourself with these standards before submitting your Pull Request. 🚀⭐ + +Contributors Wall and Sharing/Publicizing Your aiFlows Project or Flow +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Learn about contributor recognition, sharing work on Discord, and the importance of acknowledgment for aiFlows library contributions in the :ref:`recognition_info` guide. Happy contributing! 🚀🌐 + +Licence Information (Spoiler Alert: It’s Open-Source and Completely Free!) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Check out :ref:`license_info` to get quick answers about aiFlows' open-source MIT License, its free-of-charge accessibility, and you can use in commercial projects. Explore how you can contribute to the thriving aiFlows community without any worries about the legal stuff. 🚀🌟 + +.. _Why Should I Use aiFlows?: ../introduction/index.html#why-should-i-use-aiflows diff --git a/docs/contributing_info/contributing_to_FlowVerse.rst b/docs/contributing_info/contributing_to_FlowVerse.rst new file mode 100644 index 0000000..463a181 --- /dev/null +++ b/docs/contributing_info/contributing_to_FlowVerse.rst @@ -0,0 +1,62 @@ +.. 
_contributing_to_flowVerse: + +Recommended Workflow for Contributing to a Flow on the FlowVerse +================================================================ + +**1. Check Existing Flows & Talk to the Community** +--------------------------------------------------- + +Before initiating a new Flow, take a moment to explore whether a similar Flow already exists. Delve into our vibrant community on 🤲│flow-sharing +in Discord to check for existing Flows or reach out to the community on 🌊🔮│flow-verse. + +If the desired Flow doesn't exist, consider crafting a new post in our Discord's 🤲│flow-sharing. Share detailed information about the Flow you aim to implement +and let the community know about your initiative. 🤲│flow-sharing serves as an excellent platform to engage in discussions, seek feedback, receive assistance, and showcase your Flow. +Utilize this space to not only introduce your idea but also to foster collaboration, gather insights, and promote your Flow within the community. + +**2. Developing Your Flow - Creating or Enhancing for Contribution** +-------------------------------------------------------------------- + +Whether you're cultivating a new Flow or contributing to an existing one, this step is your guide to navigate the intricate pathways of the FlowVerse. +Consider checking out the :typical_developper_workflows: tutorial for examples on how to work on a new or existing Flow from the FlowVerse + +**2.1. Create an Organized Workspace** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Optimize your workflow by following our recommendation to establish a centralized workspace. Create a dedicated folder to house all the flows you plan to interact with in the FlowVerse. +Your structure should look like something like this: + +.. code-block:: bash + + ├── workspace-using-your-flows + └── flow_modules + ├── Flow1 + ├── Flow2 + ├── ... 
+ +**2.2 Leverage the Flow Template** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Consider adopting our `Flow template `_ as a solid foundation for your project. This recommended structure serves as a guide, +enhancing consistency and facilitating a smoother collaborative experience. Also, check the `ChatFlowModule `_ for an example. + +**2.3. Code With Precision** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Embrace our recommended coding standards, which serve as the backbone of a robust project. Strive for code that is not only creative but +also aligns with our :ref:`coding_standards`. This commitment ensures readability, maintainability, and alignment with the broader coding community. + +**2.5. Consider Automation for Documentation (Optional)** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For those inclined towards efficiency, exploring automation for documentation may prove beneficial. Refer to `this tutorial <./automating_documentation_flow.md>`_ for a comprehensive guide on +automating the documentation process—a strategic move for systematic project management. + +**3. Engage in Dialogue on Discord** +------------------------------------- + +Engage in meaningful discussions within the `Discord community `_. Sharing your progress, seeking advice, and actively participating in conversations +not only enhances your project but also contributes to the collaborative ethos of the community. + +--- + +Remember, each contribution, no matter how small, adds to the vibrant tapestry of the FlowVerse. Happy coding! 🚀✨ diff --git a/docs/contributing_info/contributing_to_aiFlows.rst b/docs/contributing_info/contributing_to_aiFlows.rst new file mode 100644 index 0000000..1fd8b17 --- /dev/null +++ b/docs/contributing_info/contributing_to_aiFlows.rst @@ -0,0 +1,116 @@ +.. 
_contributing_to_ai_flows: + +Contributing to aiFlows Library (for bug fixes and adding features) +====================================================================== + +**Step 1: Identifying and Reporting an Issue / Bug** +------------------------------------------------------- + +**1.1. Check Existing Issues & Talk to the Community** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Before creating a new issue, check if the problem you've encountered already exists. If it does, consider commenting on the existing issue to +provide additional details or express your interest in working on it. + +Community Discussion on Discord: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Additionally, for more immediate interaction and collaboration, you can discuss the issue on the project's `Discord`_ channel. +Join the 💻│developers or 🐛│debugging channels to connect with the community, seek advice, and coordinate efforts. Engaging with the +community on Discord can provide valuable insights and assistance throughout the issue resolution process. + +**1.2. Creating a New Issue** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If the issue doesn't exist, create a new one. Include a clear and concise title, detailed description of the problem, and steps to reproduce it. +Utilize the "Report a Bug" template for bug reports and the "Feature Request" template for suggesting new features. + +**Step 2: Getting Started with a Pull Request (PR)** +---------------------------------------------------------- + +**2.0. Inform the Community** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Comment on the issue you're working on, informing others that you're actively working on a solution. +Provide progress updates if needed. Also, inform the community on our `Discord`_ 🔨│community-projects forum that you're working on it. +Engage with the community, share your ideas, and seek feedback on your pull request. 
This open communication is crucial not only for +collaboration but also to inform others that you're actively working on the issue. This helps prevent duplicate work and ensures that community members are aware of ongoing efforts, +fostering a collaborative and efficient development environment. + +**2.1. Fork the Repository** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On the "aiflows" GitHub page, click "Fork" to create a copy of the repository under your GitHub account. + +**2.2. Clone Your Fork** +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Clone the forked repository to your local machine using the following command:: + + git clone https://github.com/your-username/aiflows.git + +**2.3. Create a New Branch** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Create a new branch for your fix or feature:: + + git checkout -b fix-branch + +**Step 3: Coding and Making a Pull Request** +-------------------------------------------- + +**3.1 Make Changes & And adhere to aiFlow's coding practices** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Implement your fix or feature. Follow best practices, and consider the project's :ref:`coding_standards`. + +**3.2. Commit Changes** +^^^^^^^^^^^^^^^^^^^^^^^ + +Commit your changes with clear and descriptive messages:: + + git add . + git commit -m "Fix: Describe the issue or feature" + +**3.3. Push Changes** +^^^^^^^^^^^^^^^^^^^^^^ + +Push your changes to your forked repository:: + + git push origin fix-branch + +**3.4. Create a Pull Request** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On the GitHub page of your fork, create a new pull request. Ensure you select the appropriate branch in the "base" and "compare" dropdowns. +Make sure to check out this Github tutorial for more details: `Creating a pull request from a fork`_. + +**3.5. Link the pull request to an issue** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In the description or comments of your pull request, reference the issue it addresses. 
Use the keyword "fixes" followed by the issue number (e.g., "fixes #123"). +This helps in automatically closing the related issue when the pull request is merged. +Check out this Github tutorial for more details: `Linking a pull request to an issue`_. + +**Step 4: Addressing Reviewer Concerns** +----------------------------------------- + +**4.1. Reviewer Feedback** +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Reviewers may suggest changes to your code. Be open to feedback and make necessary adjustments. + +**4.2. Coding Style** +^^^^^^^^^^^^^^^^^^^^^^ + +Ensure your code aligns with the project's coding style. If unsure, refer to the project's documentation or ask for clarification. + +--------------- + +Thank you for considering contributing to the aiFlows library! Your dedication and effort are immensely appreciated. +Contributors like you make a significant impact, and we want to express our gratitude. +Remember, your name will proudly appear on our contributors' wall, showcasing your valuable contributions to the aiFlows project 🚀🔥 + +.. _Creating a pull request from a fork: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork +.. _Linking a pull request to an issue: https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue +.. _Discord: https://discord.gg/yFZkpD2HAh \ No newline at end of file diff --git a/docs/contributing_info/finding_collaborators.rst b/docs/contributing_info/finding_collaborators.rst new file mode 100644 index 0000000..eac9614 --- /dev/null +++ b/docs/contributing_info/finding_collaborators.rst @@ -0,0 +1,38 @@ +.. _finding_collaborators: + +================================ +Looking for Collaborators ? +================================ + +🤝 Seeking Collaborators? If you're on the lookout for a collaborator to tackle an issue or work on a feature, head over to the `👥│flows-friends`_ forum on Discord. 
+Share your project ideas, highlight your skills, or specify areas where you could use assistance. For more targeted searches, consider posting in specialized channels, +such as the `🔬│research`_ channel if you're seeking a researcher. Your dream team may just be a click away. Let the collaboration begin! 🚀 + +Looking for Collaborators - FAQ +------------------------------- + +**1. I’m Encountering Issues With Debugging. How Can the Community Help?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 🕵️ If you're in need of debugging support, head over to the `🐛│debugging`_ channel on Discord. + Engaging with the community there can provide valuable insights and assistance in resolving your issues. + +**2. Where Can I Get Feedback on My Work?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 📣 For feedback on your work, visit the appropriate `Discord`_ channel based on your project or focus. + For FlowVerse-related projects, check out channels like Flow-sharing or Flow-verse. If you're involved in research, head to the `🔬│research`_ channel. General + development queries can be directed to the developers channel. Community-projects are also a great space for feedback. + +**3. I’m Looking To Brainstorm Ideas. Where Can I Discuss Them With the Community?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 💡 For brainstorming sessions, consider discussing your ideas in channels like Flows Ideas, Flow-verse, or Developers on `Discord`_. + Engaging with the community in these spaces can lead to fruitful discussions and valuable input on your concepts. + +**4. I Don’t Have the Bandwidth/Time To Work on a Project Related to aiFlows and Would Like To Find Somebody To Collaborate With. 
What Should I Do?** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 🤝 If you're seeking collaborators due to time constraints, head to the `👥│flows-friends`_ channel on `Discord`_. Share your project ideas, skills, + and areas where you need assistance. You might find the perfect collaborator who shares your passion and can contribute to your project. + +.. _👥│flows-friends: https://discord.gg/yFZkpD2HAh +.. _🔬│research: https://discord.gg/yFZkpD2HAh +.. _🐛│debugging: https://discord.gg/yFZkpD2HAh +.. _Discord: https://discord.gg/yFZkpD2HAh diff --git a/docs/contributing_info/index.rst b/docs/contributing_info/index.rst new file mode 100644 index 0000000..260529e --- /dev/null +++ b/docs/contributing_info/index.rst @@ -0,0 +1,31 @@ + +Contribute +========== + + + +Our mission is to make this a community-driven project that will benefit researchers and developers alike +(see the `Why should I use aiFlows?`_ ) and to achieve this goal, we need your help. + +You can become a part of the project in a few ways: + +- contribute to the aiFlows codebase: this will directly improve the library and benefit everyone using it +- contribute to the FlowVerse: by making your work accessible to everyone, others might improve your work and build on it, or you can build on others' work +- use the library in your creative projects, push it to its limits, and share your feedback: the proof of the pudding is in the eating, and the best way to identify promising directions, +as well as important missing features, is by experimenting +- last but not least, ⭐ the repository and 📣 share aiFlows with your friends and colleagues; spread the word ❤️ + +We will support the community in the best way we can but also lead by example. 
In the coming weeks, we will share: + +- a roadmap for the library (FlowViz; FlowStudio; improve flexibility, developer experience, and support for concurrency, etc. -- feedback and help would be greatly appreciated!) +- write-ups outlining features, ideas, and our long-term vision for Flows -- we encourage you to pick up any of these and start working on them in whatever way you see fit +- a version of JARVIS -- your fully customizable open-source version of ChatGPT+(++), which we will continue building in public! We hope that this excites you as much as it excites us, +and JARVIS will become one of those useful projects that will constantly push the boundaries of what's possible with Flows + +We have tried to find a way for anyone to benefit by contributing to the project. The :ref:`contributing_index` contr describes our envisioned workflow and how you could get +involved in more detail (we would love to hear your feedback on it -- the Discord server already has a channel for it :). + +In a nutshell, this is just the beginning, and we have a long way to go. Stay tuned, and let's work on a great (open-source) AI future together! + + +.. _Why Should I Use aiFlows?: ../introduction/index.html diff --git a/docs/contributing_info/license_info.rst b/docs/contributing_info/license_info.rst new file mode 100644 index 0000000..7947df0 --- /dev/null +++ b/docs/contributing_info/license_info.rst @@ -0,0 +1,40 @@ +.. _license_info: + +Licence Info: Frequently Asked Questions +========================================= + +1. I’m Worried About License Issues. Is aiFlows Open-Source? +------------------------------------------------------------- + +Absolutely! aiFlows is proudly open-source, and it operates under the MIT License. + +**MIT License:** The MIT License is a permissive open-source license that grants you the freedom to use, modify, and distribute aiFlows without any restrictions. +It encourages collaboration and community contribution. + +2. Is There Any Catch? 
Do I Have To Pay Anything? +-------------------------------------------------- + +Not at all! aiFlows is free to use, and there's no need to worry about hidden fees. +It's a library designed to make development, research, and the creation of structured interactions seamless and accessible. + +3. Can I Use aiFlows in Commercial Projects? +---------------------------------------------- + +Yes, you can! The MIT License allows you to use aiFlows in both open-source and commercial projects. +Feel free to incorporate aiFlows into your endeavors, whether they are for research, development, or commercial applications. + +4. Are There Any Restrictions on How I Can Use aiFlows? +-------------------------------------------------------- + +Nope! The MIT License provides you with considerable freedom. You can use aiFlows in any way you see fit, modify it according to your needs, +and integrate it into your projects without worrying about restrictive conditions. + +5. How Can I Contribute to aiFlows? +------------------------------------ + +Contributions are highly welcome! Whether it's bug fixes, new features, or improvements, the community thrives on collaboration. Head over to the Contribution Guidelines to +understand how you can actively participate in making aiFlows even better. + +------ + +Remember, aiFlows is here to empower your projects and initiatives without any catches. Your contributions and engagement with the community are what make aiFlows flourish. Happy coding! 🚀✨ diff --git a/docs/contributing_info/recognition_info.rst b/docs/contributing_info/recognition_info.rst new file mode 100644 index 0000000..7243ef4 --- /dev/null +++ b/docs/contributing_info/recognition_info.rst @@ -0,0 +1,29 @@ +.. _recognition_info: + +Publicizing Your Work +===================== + +1. Do Contributors to aiFlows’ Codebase Appear on the Contributors Wall in the Library’s Next Release? 
+------------------------------------------------------------------------------------------------------ + +Absolutely! Contributors to aiFlows automatically earn a spot on the contributors' wall in the README section of the library's next release. Your efforts are recognized and celebrated as part of the growing community. + +2. How Can I Share My Work With the Community? +------------------------------------------------ + +Sharing your work is highly encouraged! Here are some channels on `Discord `_ to consider: + +- **For Flows On The FlowVerse:** Utilize the 🤲│flow-sharing channel and the 🔨│community-projects forum on Discord. + +- **For Contributions To aiFlows Library:** Engage with the community in the 🔨│community-projects channels. + +- **For Research Contributions:** Share your findings on the 🔬│research channel or explore opportunities in 🔨│community-projects. + +3. Are Contributors Cited for Their Contributions to Research? +------------------------------------------------------------------------- + +Absolutely. Proper recognition is key. Contributors to projects and research are, and should always be, acknowledged and cited for their valuable contributions. This not only honors your work but also builds a culture of respect and collaboration within the community. + + + +Remember, your contributions matter, and sharing your work not only benefits you but also enriches the entire aiFlows community. Happy contributing! 🚀🌐 diff --git a/docs/getting_started/Quick_Start/quick_start.md b/docs/getting_started/Quick_Start/quick_start.md new file mode 100644 index 0000000..5bfce8d --- /dev/null +++ b/docs/getting_started/Quick_Start/quick_start.md @@ -0,0 +1,157 @@ +# Quick Start + +Welcome to the exciting world of aiFlows! 🚀 + +This tutorial will guide you through your first inference runs with different Flows from the FlowVerse for the task of question answering (QA) as an example. 
In the process, you'll get familiar with the key aspects of the library and experience how, thanks to the modular abstraction and FlowVerse, we can trivially switch between very different pre-implemented question-answering Flows! + +The guide is organized in two sections: +1. [Section 1:](#section-1-running-your-first-qa-flow-using-a-flow-from-the-flowverse) Running your first QA Flow using a Flow from the FlowVerse 🥳 +2. [Section 2:](#section-2-flowverse-playground-notebook) FlowVerse Playground Notebook + + +## Section 1: Running your First QA Flow using a Flow from the FlowVerse + +#### By the Tutorial's End, I Will Have... +* Learned how to pull Flows from the FlowVerse +* Run my first Flow +* Understood how to pass my API information to a Flow + +While, we support many more API providers (including custom ones), for the sake of simplicity, in this tutorial, we will use OpenAI and Azure. + +### Step 1: Pull a Flow From the FlowVerse + +Explore a diverse array of Flows on the FlowVerse here. In this demonstration, we'll illustrate how to use a Flow from the FlowVerse, focusing on the `ChatAtomicFlow` within the `ChatFlowModule`. This versatile Flow utilizes a language model (LLM) via an API to generate textual responses for given textual inputs. It's worth noting the same process described here applies to any available Flow in the FlowVerse (implemented by any member of the community). + +Without further ado, let's dive in! + + + +Concretely, you would use the `sync_dependencies` function to pull the flow definition and its code from the FlowVerse: + +```python +from aiflows import flow_verse +dependencies = [ +{"url": "aiflows/ChatFlowModule", "revision": "main"} +] + +flow_verse.sync_dependencies(dependencies) +``` + +#### External Library Dependencies + + +Each Flow on the FlowVerse should include a `pip_requirements.txt` file for external library dependencies (if it doesn't have any, the file should be empty). You can check its dependencies on the FlowVerse. 
In general, if there are any, you need to make sure to install them. + +As you can see [here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/pip_requirements.txt), the `ChatFlowModule` doesn't have any external dependencies, so we're all set. + +### Step 3: Run the Flow! +After executing `sync_dependencies`, the code implementation of `ChatFlowModule` has been pulled into the local repository. +We can now just import it: +```python +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow +``` + +Set your API information (copy-paste it): +```python + +#OpenAI backend +api_key = "" # copy paste your api key here +api_information = [ApiInfo(backend_used="openai", api_key=api_key)] + +# Azure backend +# api_key = "" # copy paste your api key here +# api_base = "" # copy paste your api base here +# api_version = "" #copypase your api base here +# api_information = ApiInfo(backend_used = "azure", +# api_base =api_base, +# api_key = api_version, +# api_version = api_version ) +``` +Each flow from the FlowVerse should have a `demo.yaml` file, which is a demo configuration of how to instantiate the flow. + +Load the `demo.yaml` configuration: +```python +from aiflows.utils.general_helpers import read_yaml_file +# get demo configuration +cfg = read_yaml_file("flow_modules/aiflows/ChatFlowModule/demo.yaml") +``` + +An attentive reader might have noticed that the field `flow.backend.api_infos` in `demo.yaml` is set to "???" (see a snippet here below). +```yaml +flow: # Overrides the ChatAtomicFlow config + _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config + + name: "SimpleQA_Flow" + description: "A flow that answers questions." + + # ~~~ Input interface specification ~~~ + input_interface_non_initialized: + - "question" + + # ~~~ backend model parameters ~~ + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? 
+``` + +The following overwrites the field with your personal API information: +```python +# put the API information in the config +cfg["flow"]["backend"]["api_infos"] = api_information +``` + +Instantiate your Flow: +```python +# ~~~ Instantiate the Flow ~~~ +flow = ChatAtomicFlow.instantiate_from_default_config(**cfg["flow"]) +flow_with_interfaces = { + "flow": flow, + "input_interface": None, + "output_interface": None, +} +``` +Note that `input_interface` and `output_interface` are here to control the data that comes in and out of the flow. In this case, we don't need specific data manipulation, so we will leave to `None`. + +Load some data and run your flow with the `FlowLauncher`: +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "question": "What is the capital of France?"} + +# ~~~ Run the Flow ~~~ +_, outputs = FlowLauncher.launch( + flow_with_interfaces= flow_with_interfaces ,data=data + ) + # ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` +Congratulations! You've successfully run your first question-answering Flow! +___ +You can find this example in [runChatAtomicFlow.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/quick_start/runChatAtomicFlow.py) + +To run it, use the following commands in your terminal (make sure to copy-paste your keys first): +```bash +cd examples/quick_start/ +python runChatAtomicFlow.py +``` + +Upon execution, the result should appear as follows: +```bash +[{'api_output': 'The capital of France is Paris.'}] +``` + +## Section 2: FlowVerse Playground Notebook + +Want to quickly run some Flows from FlowVerse? 
Check out our jupyter notebook [flow_verse_playground.ipynb](https://github.com/epfl-dlab/aiflows/tree/main/examples/quick_start/flow_verse_playground.ipynb) where you can quicky switch between the following flows from the FlowVerse: + +* [ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) + +* [ReAct](https://huggingface.co/aiflows/ControllerExecutorFlowModule) + +* [ChatInteractiveFlowModule](https://huggingface.co/aiflows/ChatInteractiveFlowModule) + +* [ChatWithDemonstrationsFlowModule](https://huggingface.co/aiflows/ChatWithDemonstrationsFlowModule) + +* [AutoGPTFlowModule](https://huggingface.co/aiflows/AutoGPTFlowModule) + +* [VisionFlowModule](https://huggingface.co/aiflows/VisionFlowModule) diff --git a/docs/getting_started/Tutorial/atomic_flow.md b/docs/getting_started/Tutorial/atomic_flow.md new file mode 100644 index 0000000..18a9bb2 --- /dev/null +++ b/docs/getting_started/Tutorial/atomic_flow.md @@ -0,0 +1,117 @@ +# Atomic Flow Tutorial + +This guide presents the concept of an AtomicFlow and is organized into two sections: +1. [Section 1:](#section-1-defining-atomic-flows) Defining Atomic Flows +2. [Section 2:](#section-2-writing-your-first-atomic-flow) Writing Your First Atomic Flow + +### By the Tutorial's End, I Will Have... + +* Gained insight into the relationship among a Flow, an input interface, and an output interface +* Acquired hands-on experience in creating an `AtomicFlow` with the example of `ReverseNumberAtomic` +* Learned how to run a flow with a `FlowLauncher` + +## Section 1: Defining Atomic Flows + +The `AtomicFlow` class is a subclass of `Flow` and corresponds to an Input/Output interface around a tool (note that LLMs are also tools in the Flows framework!). + +In the paper it's defined as such: + +> +> +> An `Atomic Flow` is effectively a minimal wrapper around +> a tool and achieves two things: +> 1. 
It fully specifies the tool (e.g., the most basic Atomic Flow around +> GPT-4 would specify the prompts and the generation parameters) +> 2. It abstracts the complexity of the internal computation by exposing only a standard message-based interface for exchanging information with other Flows. +> +> + +Examples of Atomic Flows include: +* A wrapper around an LLM ([ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule)) +* A search engine API ([LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule)) +* An interface with a human ([HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule) +) + +## Section 2: Writing Your First Atomic Flow + +As a starting example, let's create an Atomic Flow that takes a number and returns its reverse. (e.g., if the input is 1234, it should return 4321) + +The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in [reverseNumberAtomic.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverseNumberAtomic.yaml)): + +```yaml +name: "ReverseNumber" +description: "A flow that takes in a number and reverses it." + +input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["number"] + +output_interface: # Connector between the Flow's output and the caller + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + output_number: "reversed_number" # Rename the output_number to reversed_number +``` + +Breaking it down: +- The `name` and `description` parameters are self-explanatory. When defining a Flow you must always define these parameters + +- `input_interface` and `output_interface` define the transformation applied to the input and output data before and after calling the flow. In this case, the `input_interface` ensures the key `number` is in the input data dictionary and passes it to the flow. 
The `output_interface` renames the key `output_number` to `reversed_number` in the output data dictionary. + +Now let's define the Flow. The class would be implemented as follows (you can also check out the py file [reverse_number_atomic.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverse_number_atomic.py)): +```python +class ReverseNumberAtomicFlow(AtomicFlow): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + # Customize the logic within this function as needed for your specific flow requirements. + def run(self,input_data: Dict[str, Any]) -> Dict[str, Any]: + input_number = input_data["number"] + output_number = int(str(input_number)[::-1]) + response = {"output_number": output_number} + return response +``` +and instantiate the Flow by executing: +```python +overrides_config = read_yaml_file("reverseNumberAtomic.yaml") + +# ~~~ Instantiate the flow ~~~ +flow = ReverseNumberAtomicFlow.instantiate_from_default_config(overrides=overrides_config) +``` +Note that you can also pass a Python dictionary as the `overrides` parameter and not rely on YAML files. + +With all the preparations in place, we can now proceed to invoke our flow and execute it using the `FlowLauncher`. 
+ +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "number": 1234} # This can be a list of samples + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" +_, outputs = FlowLauncher.launch( + flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/) and can be executed as follows: + +```bash +cd examples/minimal\ reverse\ number/ +python reverse_number_atomic.py +``` + +Upon running, the answer you should expect is: +```bash +[{'output_number': 4321}] +``` + + +A few other notable examples of an atomic flow include the [HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule) and the [FixedReplyFlowModule](https://huggingface.co/aiflows/FixedReplyFlowModule) Flow. +___ + + +**Next Tutorial:** [Composite Flow Tutorial](./composite_flow.md) \ No newline at end of file diff --git a/docs/getting_started/Tutorial/autogpt_tutorial.md b/docs/getting_started/Tutorial/autogpt_tutorial.md new file mode 100644 index 0000000..57937e9 --- /dev/null +++ b/docs/getting_started/Tutorial/autogpt_tutorial.md @@ -0,0 +1,265 @@ +# AutoGPT Tutorial +**Prequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [ReAct Tutorial](./reAct.md), [React With Human Feedback Tutorial](./reActwHumanFeedback.md) + +This guide introduces an implementation of the AutoGPT flow. It's organized in two sections: + +1. [Section 1:](#section-1-whats-the-autogpt-flow) What's The AutoGPT flow ? +2. [Section 2:](#section-2-running-the-autogpt-flow) Running the AutoGPT Flow + +### By the Tutorial's End, I Will Have... 
+ +* Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications +* Gained proficiency in executing the AutoGPTFlow +* Enhanced comprehension of intricate flow structures + +## Section 1: What's The AutoGPT flow ? + +In the previous tutorial [React With Human Feedback Tutorial](./reActwHumanFeedback.md), we introduced the `ReActWithHumanFeedback` Flow. Towards the end, while the flow demonstrated effective functionality, we observed a notable challenge, especially in prolonged conversations. The principal issue emerged when attempting to transmit the entire message history to the language model (LLM), eventually surpassing the permissible maximum token limit. As a temporary solution, we opted to send only the first two and the last messages as context to the LLM. However, this approach proves suboptimal if your objective is to enable the model to maintain a more comprehensive long-term memory. Consequently, in this tutorial, we will demonstrate how to create a basic implementation of the `AutoGPT` flow, providing a solution to tackles this issue. + +The `AutoGPT` flow is a circular flow that organizes the problem-solving process into four distinct flows: + +1. `ControllerFlow`: Given an a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the `ExecutorFlow`, the next action it should execute to get closer accomplishing its goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow` + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. 
In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `ReActWithHumanFeedbackFlow` if the user expresses such a preference. + +4. `MemoryFlow`: This flow is used to read and write and read memories stored of passed conversations in a database. These memories can be passed to the `ControllerFlow` enabling it to have a long term memory without having to transmit the entire message history to the language model (LLM). It's implemented with the `VectorStoreFlow` + +Here's a broad overview of the `AutoGPTFlow`: + +``` +| -------> Memory Flow -------> Controller Flow ------->| +^ | +| | +| v +| <----- HumanFeedback Flow <------- Executor Flow <----| +``` + +## Section 2 Running the AutoGPT Flow + +In this section, we'll guide you through running the ReActWithHumanFeedbackFlow. + +For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/). + +Now, let's delve into the details without further delay! + +Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. Specifically, we'll fetch the `AutoGPTFlowModule`, which includes `ControllerFlow`, `ExecutorFlow`, and the `WikiSearchAtomicFlow`. 
Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow. + +```python +from aiflows import flow_verse +# ~~~ Load Flow dependecies from FlowVerse ~~~ +dependencies = [ + {"url": "aiflows/AutoGPTFlowModule", "revision": "main"}, + {"url": "aiflows/LCToolFlowModule", "revision": "main"} +] + +flow_verse.sync_dependencies(dependencies) +``` + +If you've successfully completed the [ReAct Tutorial](./reAct.md), you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a `pip_requirements.txt` file detailing external library dependencies. To further explore this, examine the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt), and the [pip_requirements.txt for the AutoGPTFlowModule](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/pip_requirements.txt). You'll observe the necessity to install the following external libraries if they haven't been installed already: + +```bash +pip install duckduckgo-search==3.9.6 +pip install wikipedia==1.4.0 +pip install langchain==0.0.336 +pip install chromadb==0.3.29 +pip install faiss-cpu==1.7.4 +``` + +Now that we've fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our Flow. + +The configuration for our flow is available in [AutoGPT.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/AutoGPT.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. 
`AutoGPTFlow`'s default config can be found [here](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/AutoGPTFlow.yaml), the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml) and memory's flow default config `VectorStoreFlow` can be found [here](https://huggingface.co/aiflows/VectorStoreFlowModule/blob/main/VectorStoreFlow.yaml) + +Our focus will be on explaining the modified parameters in the configuration, with reference to the [ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md) Tutorial for unchanged parameters. +Now let's look at the flow's configuration: +```yaml +flow: + _target_: flow_modules.aiflows.AutoGPTFlowModule.AutoGPTFlow.instantiate_from_default_config + max_rounds: 30 +``` +* `_target_`: We're instantiating `AutoGPTFlow` with its default configuration and introducing some overrides, as specified below. +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow`, the `ExecutorFlow`, the `HumanFeedbackFlow` and the `MemoryFlow`: +```yaml + ### Subflows specification + subflows_config: + #ControllerFlow Configuration + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." + input_args: ["answer"] + backend: + api_infos: ??? 
+ human_message_prompt_template: + template: |2- + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + input_interface_initialized: + - "observation" + - "human_feedback" + + previous_messages: + last_k: 1 + first_k: 2 +``` +The `ControllerFlow` is identical to `ReActWithHumanFeedback`. +```yaml + #ExecutorFlow Configuration + Executor: + _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config + subflows_config: + wiki_search: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config + ddg_search: + _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` +The `ExecutorFlow` is identical to `ReActWithHumanFeedback` and `ReAct`. +```yaml + #MemoryFlow Configuration + Memory: + backend: + model_name: none + api_infos: ??? +``` +The `MemoryFlow`, primarily instantiated from [AutoGPT's defaut configuration](https://huggingface.co/aiflows/AutoGPTFlowModule/blob/main/AutoGPTFlow.yaml#L87).Additionally, please refer to the `MemoryFlow`'s [FlowCard](https://huggingface.co/aiflows/VectorStoreFlowModule) for more details. + +With our configuration file in place, we can now proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (commented): + +```python +# ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") ) + +``` + +Next, load the YAML configuration, insert your API information, and define the `flow_with_interfaces` dictionary as shown below: + +```python +cfg = read_yaml_file(cfg_path) + +# put the API information in the config +cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information +cfg["flow"]["subflows_config"]["Memory"]["backend"]["api_infos"] = api_information +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Lastly, execute the flow using the FlowLauncher. 
+```python +data = { + "id": 0, + "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?", +} +# At first, we retrieve information about Michael Jordan the basketball player +# If we provide feedback, only in the first round, that we are not interested in the basketball player, +# but the statistician, and skip the feedback in the next rounds, we get the correct answer + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, + data=data, + path_to_output_file=path_to_output_file, +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/AutoGPT/) and can be executed as follows: + +```bash +cd examples/AutoGPT +python run.py +``` + +Upon execution, you will be prompted for feedback on the Executor's answer. The interaction will resemble the following: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? + +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Jordan'} + +== Result +{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. 
He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. 
He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'} + +[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` + +You can respond with: + +``` +No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia? +``` + +Subsequently, ReAct will provide a response similar to this: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? 
+ +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Irwin Jordan'} + +== Result +{'wiki_content': 'Michael Irwin Jordan (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '} +[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` +Your subsequent response could be: + +``` +There you go! I think you have it! +``` +Eventually, the flow should terminate and return something similar to: + +``` +[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}] +``` + +Congratulations you've succesfully run `AutoGPTFlow` ! 
diff --git a/docs/getting_started/Tutorial/composite_flow.md b/docs/getting_started/Tutorial/composite_flow.md new file mode 100644 index 0000000..c143bf9 --- /dev/null +++ b/docs/getting_started/Tutorial/composite_flow.md @@ -0,0 +1,178 @@ +# Composite Flow Tutorial +**Prerequisites:** [Atomic Flow Tutorial](./atomic_flow.md) + + +This guide introduces the concept of a composite flow by illustrating the creation of a sequential flow, a specific type of composite flow. The content is structured into two main sections: +1. [Section 1:](#section-1-defining-composite-flows-and-sequential-flows) Defining Composite Flows and Sequential Flows +2. [Section 2:](#section-2-writing-your-first-sequential-flow) Writing Your First Sequential Flow + +### By the Tutorial's End, I Will Have... + +* Gained insights into the concept of a Composite Flow +* Acquired the skills to create a `SequentialFlow` through a toy example +* Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure + + +## Section 1: Defining Composite Flows and Sequential Flows + +A `SequentialFlow` entails the sequential execution of a series of flows. It's a subclass of `CompositeFlow`. + +In the paper, a Composite Flow is described as follows: + +> +> +> Composite Flows accomplish more challenging, higher-level goals by leveraging and coordinating +> other Flows. Crucially, thanks to their local state and standardized interface, Composite Flows +> can readily invoke Atomic Flows or other Composite Flows as part of compositional, structured +> interactions of arbitrary complexity. Enabling research on effective patterns of interaction is one of +> the main goals of our work. +> +> + +Therefore, a `SequentialFlow` is a specialized form of `CompositeFlow` that runs Flows sequentially. 
+ +Other types of Composite Flows include: +* `CircularFlow`: A series of flows excuted in a circular fashion (e.g [ReAct](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/)) +* `BranchingFlow`: A series of flows organized in a parallel fashion. The branch (Flow) executed depends on the input of the branching flow (e.g. [BranchingFlow](https://github.com/epfl-dlab/aiflows/tree/main/aiflows/base_flows/branching.py)) + +## Section 2: Writing Your First Sequential Flow + +As an introductory example, let's leverage the atomic flow created in the previous tutorial ([Atomic Flow Tutorial](./atomic_flow.md)) to construct a `SequentialFlow`. This `SequentialFlow` will take a number, reverse it, and then reverse it back again. + +Given the input number 1234, the process should unfold as follows: + +```rust +Input | Sequential Flow | Output +------------|--------------------------------------|-------------- + | | +1234 -------|---> Flow1 ---> 4321 ---> Flow2 ------|-----> 1234 + | | + | | +``` + +The flow configuration, presented as a YAML file, is outlined below (you can also review the configuration in [reverseNumberSequential.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverseNumberSequential.yaml)): +```yaml +name: "ReverseNumberTwice" +description: "A sequential flow that reverses a number twice." + +# input and output interfaces of SequentialFlow +input_interface: + - "number" + +output_interface: + - "output_number" + +#configuration of subflows +subflows_config: + first_reverse_flow: + _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config + name: "ReverseNumberFirst" + description: "A flow that takes in a number and reverses it." + second_reverse_flow: + _target_: reverse_number_atomic.ReverseNumberAtomicFlow.instantiate_from_default_config + name: "ReverseNumberSecond" + description: "A flow that takes in a number and reverses it." 
+ +# Define order of execution of subflows and input & output interfaces for proper execution +topology: + #fist flow to execute + - goal: reverse the input number + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["number"] + flow: first_reverse_flow + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + output_number: first_reverse_output + keys_to_select: ["first_reverse_output"] + reset: false + #second flow to execute + - goal: reverse the output of the first reverse + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + first_reverse_output: number + keys_to_select: ["number"] + flow: second_reverse_flow + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_select: ["output_number"] + reset: false + +``` + +Breaking it down: +* The `name` and `description` parameters are self-explanatory. When defining a Flow you must always define these parameters + +* `input_interface` specifies the expected keys in the input data dictionary passed to the `SequentialFlow` + +* `output_interface` outlines the expected keys in the output data dictionary produced by the `SequentialFlow` + +* In the `subflows_config`, the specification of flows constituating the `SequentialFlow` are detailed. Each subflow is articulated as a key-item pair within a dictionary. The key denotes the name assigned to the subflow, while the corresponding item is a dictionary encapsulating the configuration of the subflow. In this instance, subflows are outlined with their default configuration, incorporating overrides for the `name` and `description` of each flow. + +* `topology` defines the order in which flows are executed within our `SequentialFlow`. +It also specifies the input and output interfaces for each flow. The fields in topology include: + * `goal`: A description of the objective of the flow at the given execution step. 
+ * `flow`: The name of the flow to be invoked, matching the name defined in `subflows_config`. + * `input_interface`: Specifies the transformation to the input data + dictionary before passing it to the current subflow. + * `output_interface`: Specifies the transformation to the output data dictionary + before passing it to the next subflow. + * `reset`: Determines whether to reset the state and history of the flow after calling it (i.e., deletes all message history and key-value pairs (cache) saved in the flow state). + + +Note the importance of the transformations defined in the `input_interface` and `output_interface` +within the `topology`. These transformations play a crucial role in establishing a connection +between the two flows. Specifically, the `input_interface` of the `second_reverse_flow` includes a transformation +that renames the dictionary key `first_reverse_output`, which is passed by the `first_reverse_flow`, to `number`. +This ensures proper key naming and enables the seamless execution of the subsequent flow. + +Now let's instantiate the `SequentialFlow` (you can also check out the py file +[reverse_number_sequential.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/reverse_number_sequential.py)): + +```python +cfg_path = os.path.join(root_dir, "reverseNumberSequential.yaml") +cfg = read_yaml_file(cfg_path) + +# ~~~ Instantiate the flow ~~~ +flow = SequentialFlow.instantiate_from_default_config(**cfg) +``` + +There is no need to define any new class +since the `SequentialFlow` is a [base_flow](https://github.com/epfl-dlab/aiflows/tree/main/aiflows/base_flows/sequential.py) (meaning it's already defined in the aiFlows library) and we've already +defined the `ReverseNumberAtomicFlow` in the previous tutorial ([Atomic Flow Tutorial](./atomic_flow.md)) + +With all the preparations in place, we can now proceed to invoke our flow and execute it using the `FlowLauncher`. 
+ +```python +# ~~~ Get the data ~~~ +data = {"id": 0, "number": 1234} # This can be a list of samples + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" +_, outputs = FlowLauncher.launch( + flow_with_interfaces={"flow": flow}, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20reverse%20number/) and can be executed as follows: + +```bash +cd examples/minimal\ reverse\ number/ +python reverse_number_sequential.py +``` + +Upon running, the answer you should expect is: +``` +[{'output_number': 1234}] +``` +___ + + +**Next Tutorial:** [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) \ No newline at end of file diff --git a/docs/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md b/docs/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md new file mode 100644 index 0000000..68a950c --- /dev/null +++ b/docs/getting_started/Tutorial/intro_to_FlowVerse_minimalQA.md @@ -0,0 +1,229 @@ + +# Introducing the FlowVerse with a Simple Q&A Flow +**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Atomic Flow Tutorial](./atomic_flow.md) + +This guide introduces the FlowVerse via an example: [minimalQA](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/). The guide is organized in two sections: +1. [Section 1:](#section-1-whats-the-flowverse) What's the FlowVerse? +2. [Section 2:](#section-2-crafting-a-simple-qa-flow-with-the-chatflowmodule) Crafting a Simple Q&A Flow with the ChatFlowModule + +### By the Tutorial's End, I Will Have... 
+
+* Gained an understanding of the FlowVerse and its significance
+* Acquired the skills to retrieve flows from the FlowVerse
+* Successfully developed my initial flow by incorporating a FlowVerse flow
+* Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API
+* Familiarized myself with the fundamental parameters of the `ChatAtomicFlow`
+
+## Section 1: What's the FlowVerse ?
+The FlowVerse is the hub of flows created and shared by our amazing community for everyone to use! These flows are usually shared on Hugging Face with the intention of being reused by others. Explore our Flows on the FlowVerse [here](https://huggingface.co/aiflows)!
+
+## Section 2: Crafting a Simple Q&A Flow with the ChatFlowModule
+
+In this section, we'll guide you through the creation of a simple Q&A flow — a single user-assistant interaction with an LLM. We'll achieve this by leveraging the `ChatAtomicFlow` from the [ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) in the FlowVerse. The `ChatAtomicFlow` seamlessly interfaces with an LLM through an API, generating textual responses for textual input. Powered by the LiteLLM library in the backend, `ChatAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers).
+
+For an in-depth understanding of `ChatAtomicFlow`, refer to its [FlowCard (README)](https://huggingface.co/aiflows/ChatFlowModule/blob/main/README.md).
+Note that all the code referenced from this point onwards can be found [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/)
+
+Let's dive in without further delay!
+ +First thing to do is to fetch the `ChatFlowModule` from the FlowVerse (see [run_qa_flow.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow.py) to see all the code): +```python +from aiflows import flow_verse +# ~~~ Load Flow dependecies from FlowVerse ~~~ +dependencies = [ + {"url": "aiflows/ChatFlowModule", "revision": "297c90d08087d9ff3139521f11d1a48d7dc63ed4"}, +] +flow_verse.sync_dependencies(dependencies) +``` +Let's break this down: +* `dependencies` is a list of dictionaries (in this case, there's only one) indicating which FlowModules we want to pull from the FlowVerse. The dictionary contains two key-value pairs: + * `url`: Specifies the URL where the flow can be found on Hugging Face. Here, the URL is `aiflows/ChatFlowModule`, where `aiflows` is the name of our organization on Hugging Face (or the username of a user hosting their flow on Hugging Face), and `ChatFlowModule` is the name of the FlowModule containing the `ChatAtomicFlow` on the FlowVerse. Note that the `url` is literally the address of the `ChatFlowModule` on Hugging Face (excluding the https://huggingface.co/). So if you type https://huggingface.co/aiflows/ChatFlowModule in your browser, you will find the Flow. + * `revision`: Represents the revision id (i.e., the full commit hash) of the commit we want to fetch. Note that if you set `revision` to `main`, it will fetch the latest commit on the main branch. + +Now that we've fetched the `ChatAtomicFlowModule` from the FlowVerse, we can start creating our Flow. + +The configuration for our flow is available in [simpleQA.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/simpleQA.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. 
The default configuration can be found [here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) + +Let's start with the input and output interface: +```yaml +input_interface: # Connector between the "input data" and the Flow + _target_: aiflows.interfaces.KeyInterface + additional_transformations: + - _target_: aiflows.data_transformations.KeyMatchInput # Pass the input parameters specified by the flow + +output_interface: # Connector between the Flow's output and the caller + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + api_output: answer # Rename the api_output to answer +``` +* `input_interface` specifies the expected keys in the input data dictionary passed to our flow. +* `output_interface` outlines the expected keys in the output data dictionary produced by our flow. + +Now let's look at the flow's configuration: +```yaml +flow: # Overrides the ChatAtomicFlow config + _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config + + name: "SimpleQA_Flow" + description: "A flow that answers questions." +``` + +* The `_target_` parameter specifies the instantiation method for our flow. In this instance, we're using it to instantiate the `ChatAtomicFlow` from [its default configuration file](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) +* `name` and `description`: self-explanatory parameters + + +```yaml + # ~~~ Input interface specification ~~~ + input_interface_non_initialized: + - "question" +``` +* The `input_interface_non_initialized` parameter in our configuration specifies the keys expected in the input data dictionary when the `ChatAtomicFlow` is called for the first time (i.e., when the system prompt is constructed). Essentially, it serves a role similar to the regular `input_interface`. The distinction becomes apparent when you require different inputs for the initial query compared to subsequent queries. 
For instance, in ReAct, the first time you query the LLM, the input is provided by a human, such as a question. In subsequent queries, the input comes from the execution of a tool (e.g. a query to wikipedia). In ReAct's case, these two scenarios are distinguished by `ChatAtomicFlow`'s `input_interface_non_initialized` and `input_interface_initialized` parameters. For this tutorial, as we're creating a simple Q&A flow performing a single user-assistant interaction with an LLM, we never use `input_interface_initialized` (which is why it's not defined in the configuration). + +```yaml + # ~~~ backend model parameters ~~ + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? + model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" + + # ~~~ generation_parameters ~~ + n: 1 + max_tokens: 3000 + temperature: 0.3 + + top_p: 0.2 + frequency_penalty: 0 + presence_penalty: 0 +``` +* `backend` is a dictionary containing parameters specific to the LLM. These parameters include: + * `api_infos` Your API information (which will be passed later for privacy reasons). + * `model_name` A dictionary with key-item pairs, where keys correspond to the `backend_used` attribute of the `ApiInfo` class for the chosen backend, and values represent the desired model for that backend. Model selection depends on the provided `api_infos`. Additional models can be added for different backends, following LiteLLM's naming conventions (refer to LiteLLM's supported providers and model names [here](https://docs.litellm.ai/docs/providers)). For instance, with an Anthropic API key and a desire to use "claude-2," one would check Anthropic's model details [here](https://docs.litellm.ai/docs/providers/anthropic#model-details). As "claude-2" is named the same in LiteLLM, the `model_name` dictionary would be updated as follows: + ```yaml + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? 
+ model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" + anthropic: "claude-2" + ``` + * `n`,`max_tokens`,`top_p`, `frequency_penalty`, `presence_penalty` are generation parameters for LiteLLM's completion function (refer to all possible generation parameters [here](https://docs.litellm.ai/docs/completion/input#input-params-1)). + + +```yaml + # ~~~ Prompt specification ~~~ + system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are a helpful chatbot that truthfully answers questions. + input_variables: [] + partial_variables: {} + + init_human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + Answer the following question: {{question}} + input_variables: ["question"] + partial_variables: {} + +``` +* `system_message_prompt_template`: This is the system prompt template passed to the LLM. +* `init_human_message_prompt_template`: This is the user prompt template passed to the LLM the first time the flow is called. It includes the following parameters: + * `template` The prompt template in Jinja format. + * `input_variables` The input variables of the prompt. For instance, in our case, the prompt `template` + is "Answer the following question: {{question}}," and our `input_variables` is "question." Before querying the LLM, the prompt `template` is rendered by placing the input variable "question" in the placeholder "{{question}}" of the prompt `template`. It's worth noting that `input_interface_non_initialized == input_variables`. This alignment is intentional, as they are passed as input_variables to the `init_human_message_prompt_template` to render the `template` + + +Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): +```python + # ~~~ Set the API information ~~~ +# OpenAI backend + +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] + +# # Azure backend +# api_information = [ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") )] + +# # Anthropic backend +#api_information = [ApiInfo(backend_used= "anthropic",api_key = os.getenv("ANTHROPIC_API_KEY"))] + +``` +Next, load the YAML configuration, insert your API information, and define the `flow_with_interfaces` dictionary: +```python + +cfg_path = os.path.join(root_dir, "simpleQA.yaml") +cfg = read_yaml_file(cfg_path) +# put api information in config (done like this for privacy reasons) +cfg["flow"]["backend"]["api_infos"] = api_information + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Finally, run the flow with `FlowLauncher`. 
+
+```python
+# ~~~ Get the data ~~~
+data = {"id": 0, "question": "Who was the NBA champion in 2023?"} # This can be a list of samples
+
+# ~~~ Run inference ~~~
+path_to_output_file = None
+# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk
+
+_, outputs = FlowLauncher.launch(
+    flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file
+)
+
+# ~~~ Print the output ~~~
+flow_output_data = outputs[0]
+print(flow_output_data)
+```
+
+The full example is available [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/) and can be executed as follows:
+
+```bash
+cd examples/minimal\ QA/
+python run_qa_flow.py
+```
+
+Upon running, the answer is similar to the following:
+```bash
+[{'answer': "I'm sorry, but as an AI language model, I don't have access to real-time information or the ability to predict future events. As of now, I cannot provide you with the answer to who the NBA champion was in 2023. I recommend checking reliable sports news sources or conducting an internet search for the most up-to-date information."}]
+```
+To learn how to obtain information on the 2023 NBA Champion using Flows, refer to the next tutorial [ReAct](./reAct.md), a Flow that provides `ChatAtomicFlow` access to tools like search engines!
+
+Additionally, the [minimal QA](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/) folder contains other examples using `ChatAtomicFlow` such as:
+* Running a [Flow with Demonstrations](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow_w_demonstrations.py) (encouraging the LLM to finish its answers with "my sire"). 
To run:
+    ```bash
+    cd examples/minimal\ QA/
+    python run_qa_flow_w_demonstrations.py
+    ```
+* Running the [Simple Q&A flow in a multithreaded fashion](https://github.com/epfl-dlab/aiflows/tree/main/examples/minimal%20QA/run_qa_flow_multithreaded.py) in order to answer multiple questions with multiple API keys or providers. To run:
+    ```bash
+    cd examples/minimal\ QA/
+    python run_qa_flow_multithreaded.py
+    ```
+___
+
+
+**Next Tutorial:** [ReAct Tutorial](./reAct.md)
+
+
+
+
+
diff --git a/docs/getting_started/Tutorial/reAct.md b/docs/getting_started/Tutorial/reAct.md
new file mode 100644
index 0000000..2ffe192
--- /dev/null
+++ b/docs/getting_started/Tutorial/reAct.md
@@ -0,0 +1,202 @@
+# ReAct Tutorial
+**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [Atomic Flow Tutorial](./atomic_flow.md), [Composite Flow Tutorial](./composite_flow.md)
+
+This guide introduces an implementation of the ReAct flow. The guide is organized in two sections:
+
+1. [Section 1:](#section-1-whats-the-react-flow) What's The ReAct Flow ?
+2. [Section 2:](#section-2-running-the-react-flow) Running the ReAct Flow
+
+### By the Tutorial's End, I Will Have...
+
+* Gained an understanding of the ReAct flow and its significance
+* Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies
+* Successfully developed my first personalized ReAct flow
+* Familiarized myself with the essential parameters of the `ControllerExecutorFlow`
+
+## Section 1: What's The ReAct Flow ?
+
+The ReAct flow, as introduced in [ReAct: Synergizing Reasoning and Acting in Language Models](https://arxiv.org/pdf/2210.03629.pdf), represents a Circular flow that organizes the problem-solving process into two distinct flows:
+
+1. 
`ControllerFlow`: With a specified goal and past observations from prior executions, the `ControllerFlow` makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the `ExecutorFlow` and contribute to progressing towards the defined goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow`. + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +These steps are repeated until an answer is obtained. + +## Section 2: Running The ReAct Flow + +In this section, we'll guide you through running the ReAct Flow. + +For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/) + +Now, let's delve into the details without further delay! + +Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. Specifically, we'll fetch the `ControllerExecutorFlowModule`, which includes the `ControllerExecutorFlow` (the composite flow of `ControllerFlow` and `ExecutorFlow`) and the `WikiSearchAtomicFlow`. Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow. 
+```python +from aiflows import flow_verse +# ~~~ Load Flow dependecies from FlowVerse ~~~ +dependencies = [ + {"url": "aiflows/LCToolFlowModule", "revision": "main"}, + {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"}, +] + +flow_verse.sync_dependencies(dependencies) +``` + +Each flow on the FlowVerse includes a `pip_requirements.txt` file for external library dependencies. Check out the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt)) and [pip_requirements.txt for the ControllerExecutorFlowModule](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/pip_requirements.txt). You'll notice the need to install the following external libraries: +``` +pip install wikipedia==1.4.0 +pip install langchain==0.0.336 +pip install duckduckgo-search==3.9.6 +``` + +Now that we've fetched the flows from the FlowVerse and installed their respective requirements, we can start creating our flow. + +The configuration for our flow is available in [ReAct.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/ReAct.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The `ControllerExecutorFlow`'s default config can be found [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml) and the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml). +Now let's look at the flow's configuration: +```yaml +flow: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerExecutorFlow.instantiate_from_default_config + max_rounds: 30 +``` +* The `_target_` parameter specifies the instantiation method for our flow. 
In this instance, we're using it to instantiate the `ControllerExecutorFlow` from its default configuration file. +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow` and the `ExecutorFlow`: +```yaml + ### Subflows specification + subflows_config: + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." + input_args: ["answer"] + backend: + _target_: aiflows.backends.llm_lite.LiteLLMBackend + api_infos: ??? + model_name: + openai: "gpt-3.5-turbo" + azure: "azure/gpt-4" +``` +* `Controller`: The configuration of the controller flow: + * `commands`: A dictionary containing the set of actions the `ControllerFlow` can call. 
Each key of the dictionary is the name of the action it can excute and it's items are a another dictionary containing the following parameters: + * `description`: A description of what the action does (it's important to be clear since these descriptions are passed to the system prompt to explain to the LLM what each action can do) + * `input_args`: The list of arguments required by a given action + * `backend`: The backend used by the `ControllerFlow` (see the previous tutorial [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) for a more detailed description of the backend) +```yaml + Executor: + _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config + subflows_config: + wiki_search: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config + ddg_search: + _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + backend: + _target_: langchain.tools.DuckDuckGoSearchRun + +``` +* `Executor`: The configuration of the `ExecutorFlow`: + * `subflows_config`: The configuration of the subflows of the `ExecutorFlow`. Each subflow corresponds to an action defined in the `ControllerFlow` through the `commands` parameter. It is noteworthy that the names of the `command` keys align with the names of the subflows in the Executor's `subflow_config` + +Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): + +```python + # ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = [ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") )] +``` + +Next, load the YAML configuration, insert your API information, +and define the `flow_with_interfaces` dictionary: + +```python +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +root_dir = "." +cfg_path = os.path.join(root_dir, "ReAct.yaml") +cfg = read_yaml_file(cfg_path) +# put the API information in the config +cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"), + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` +Finally, run the flow with `FlowLauncher`. 
+ +```python + # ~~~ Get the data ~~~ +# This can be a list of samples +# data = {"id": 0, "goal": "Answer the following question: What is the population of Canada?"} # Uses wikipedia +data = {"id": 0, "goal": "Answer the following question: Who was the NBA champion in 2023?"} + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, data=data, path_to_output_file=path_to_output_file +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The full example is available [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReAct/) and can be executed as follows: + +```bash +cd examples/ReAct +python run.py +``` + +Upon execution, the result appears as follows: +```bash +[{'answer': 'The NBA champion in 2023 was the Denver Nuggets.', 'status': 'finished'}] +``` +Finally, we have the correct answer! + +However, let's consider a scenario where you want to instruct ReAct: + +> **Answer the following question: What is the profession and date of birth of Michael Jordan?** + +Where Michael Jordan is the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkeley. If you run this with ReAct, the obtained answer might look like this: + +```bash +[{'answer': 'Michael Jordan is a former professional basketball player and an American businessman. He was born on February 17, 1963.', 'status': 'finished'}] +``` +Which is not what we wanted! This output does not align with our intended question. + +To discover how to retrieve information on Michael Jordan, the Berkeley Professor, using aiFlows, refer to the next tutorial [ReActWithHumanFeedback](./reActwHumanFeedback.md), a flow that incorporates human feedback into the ReAct flow! 
+ +___ + + +**Next Tutorial:** [ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md) + + diff --git a/docs/getting_started/Tutorial/reActwHumanFeedback.md b/docs/getting_started/Tutorial/reActwHumanFeedback.md new file mode 100644 index 0000000..c240c96 --- /dev/null +++ b/docs/getting_started/Tutorial/reActwHumanFeedback.md @@ -0,0 +1,373 @@ +# ReAct With Human Feedback Tutorial +**Prerequisites:** setting up your API keys (see [setting_up_aiFlows.md](./setting_up_aiFlows.md)), [Introducing the FlowVerse with a Simple Q&A Flow Tutorial](./intro_to_FlowVerse_minimalQA.md), [ReAct Tutorial](./reAct.md) + +This guide introduces an implementation of the ReAct With Human Feedback flow. It's organized in two sections: + +1. [Section 1:](#section-1-whats-the-react-with-human-feedback-flow) What's The ReAct With Human Feedback Flow? +2. [Section 2:](#section-2-running-the-react-with-human-feedback-flow) Running the ReAct With Human Feedback Flow + +### By the Tutorial's End, I Will Have... + +* Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences +* Learned how to integrate a human feedback flow into ReAct +* Incorporated customized functions into the input and output interfaces +* Grasped the limitations of ReAct, particularly its lack of long-term memory +* Deepened my understanding of the key parameters in the `ControllerExecutorFlow` configuration + +## Section 1: What's The ReAct With Human Feedback Flow? + +In the previous tutorial ([ReAct Tutorial](./reAct.md)), we introduced the ReAct flow. We noticed towards the end that, even though it works well, it can fail in some situations. 
For example, consider you ask the following: +> **Answer the following question: What is the profession and date of birth of Michael Jordan?** + + +In scenarios where the mentioned "Michael Jordan" refers to the Professor of Electrical Engineering and Computer Sciences and Professor of Statistics at Berkeley, ReAct may misinterpret it as the basketball player Michael Jordan and provide information about the latter. To address this, we can introduce an additional flow in our circular flow, allowing users to provide feedback on intermediate answers. This tutorial will guide you through the creation of the `ReActWithHumanFeedback` flow to handle such situations. + +The `ReActWithHumanFeedback` flow is a circular flow that organizes the problem-solving process into three distinct flows: + +1. `ControllerFlow`: With a specified goal and past observations from prior executions, the `ControllerFlow` makes decisions by choosing the next action from a predefined set. These actions are explicitly defined in the `ExecutorFlow` and contribute to progressing towards the defined goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow`. + +2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. In our setup, the `ExecutorFlow` includes the following individual flows: + * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term. + * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query. + +3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. 
The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `ReActWithHumanFeedbackFlow` if the user expresses such a preference. + +## Section 2: Running the ReAct With Human Feedback Flow + +In this section, we'll guide you through running the `ReActWithHumanFeedbackFlow`. + +For the code snippets referenced from this point onward, you can find them [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/). + +Now, let's delve into the details without further delay! + +Similar to the [Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) tutorial (refer to that tutorial for more insights), we'll start by fetching some flows from the FlowVerse. Specifically, we'll fetch the `ControllerExecutorFlowModule`, which includes the `ControllerExecutorFlow` (the composite flow of `ControllerFlow` and `ExecutorFlow`) and the `WikiSearchAtomicFlow`. Additionally, we'll fetch the `LCToolFlow`, a flow capable of implementing the DuckDuckGo search flow, and the `HumanStandardInputFlowModule`, a flow capable of gathering human feedback. + +```python +from aiflows import flow_verse +# ~~~ Load Flow dependecies from FlowVerse ~~~ +dependencies = [ + {"url": "aiflows/ControllerExecutorFlowModule", "revision": "main"}, + {"url": "aiflows/HumanStandardInputFlowModule", "revision": "main"}, + {"url": "aiflows/LCToolFlowModule", "revision": "main"}, +] + +flow_verse.sync_dependencies(dependencies) +``` + +If you've successfully completed the preceding tutorial, [ReAct Tutorial](./reAct.md), you are likely familiar with the fact that each flow within the FlowVerse is accompanied by a `pip_requirements.txt` file detailing external library dependencies. 
To further explore this, examine the [pip_requirements.txt for the LCToolFlowModule](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/pip_requirements.txt), the [pip_requirements.txt for the ControllerExecutorFlowModule](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/pip_requirements.txt), and the [pip_requirements.txt for the HumanStandardInputFlowModule](https://huggingface.co/aiflows/HumanStandardInputFlowModule/blob/main/pip_requirements.txt). You'll observe the necessity to install the following external libraries if they haven't been installed already: + +```bash +pip install wikipedia==1.4.0 +pip install langchain==0.0.336 +pip install duckduckgo-search==3.9.6 +``` + + +Next, in order to empower the `HumanStandardInputFlow` to terminate the `ReActWithHumanFeedback` flow, it is essential to implement a function in the `ControllerExecutorFlow` class for this specific purpose. Consequently, a new class, `ReActWithHumanFeedback`, is introduced as follows (you can find it in [ReActWithHumandFeedback.py](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/ReActWithHumanFeedback.py)): + + +```python +from typing import Dict, Any + +from aiflows.base_flows import CircularFlow +from flow_modules.aiflows.ControllerExecutorFlowModule import ControllerExecutorFlow + +class ReActWithHumanFeedback(ControllerExecutorFlow): + @CircularFlow.output_msg_payload_processor + def detect_finish_in_human_input(self, output_payload: Dict[str, Any], src_flow) -> Dict[str, Any]: + human_feedback = output_payload["human_input"] + if human_feedback.strip().lower() == "q": + return { + "EARLY_EXIT": True, + "answer": "The user has chosen to exit before a final answer was generated.", + "status": "unfinished", + } + + return {"human_feedback": human_feedback} +``` +Note that, we've simply added one function to the class which initiates the procedure to terminate the flow should the user enter "q" when prompted for feedback. 
+ +The configuration for our flow is available in [ReActWithHumanFeedback.yaml](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/ReActWithHumanFeedback.yaml). We will now break it down into chunks and explain its various parameters. Note that the flow is instantiated from its default configuration, so we are only defining the parameters we wish to override here. The `ControllerExecutorFlow`'s default config can be found [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml) and the `LCToolFlow` default config can be found [here](https://huggingface.co/aiflows/LCToolFlowModule/blob/main/LCToolFlow.yaml). + +Our focus will be on explaining the modified parameters in the configuration, with reference to the previous tutorial for unchanged parameters. +Now let's look at the flow's configuration: +```yaml +max_rounds: 30 +``` +* `max_rounds`: The maximum number of rounds the flow can run for. + +Now let's look at the flow's `subflows_config`, which provides configuration details for ReAct's subflows—`ControllerFlow`, the `ExecutorFlow` and the `HumanFeedbackFlow`: +```yaml +### Subflows specification +subflows_config: + #ControllerFlow + Controller: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config + backend: + api_infos: ??? + commands: + wiki_search: + description: "Performs a search on Wikipedia." + input_args: ["search_term"] + ddg_search: + description: "Query the search engine DuckDuckGo." + input_args: ["query"] + finish: + description: "Signal that the objective has been satisfied, and returns the answer to the user." 
+ input_args: ["answer"] + + human_message_prompt_template: + template: |2- + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + input_interface_initialized: + - "observation" + - "human_feedback" + + previous_messages: + first_k: 2 # keep the system prompt and the original goal + last_k: 1 # keep only the last message +``` +Note that the `ControllerFlow` configuration remains nearly identical to that in the previous tutorial, [ReAct Tutorial](./reAct.md). The only differences are: +* The inclusion of an extra argument, "human_feedback," in both the `input_interface_initialized` parameter and the `input_variables` parameter of the `human_message_prompt_template`. This is to incorporate the human's feedback in the message fed to the `ControllerFlow` +* Implementation of a mechanism to limit the number of `previous_messages` from the flow's chat history that is input to the Language Model (LLM). This limitation is crucial to prevent the Language Model (LLM) from exceeding the maximum token limit. Two parameters are overridden for this purpose: + * `first_k`: Adds the first_k earliest messages of the flow's chat history to the input of the LLM. + * `last_k`: Adds the last_k latest messages of the flow's chat history to the input of the LLM. + + +```yaml + #ExecutorFlow + Executor: + _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config + subflows_config: + wiki_search: + _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config + ddg_search: + _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` +The `ExecutorFlow` is identical to ReAct. 
+```yaml + HumanFeedback: + _target_: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config + request_multi_line_input_flag: False + query_message_prompt_template: + template: |2- + Please provide feedback on the last step. + + Relevant information: + == Goal == + {{goal}} + + == Last Command == + {{command}} + + == Args + {{command_args}} + + == Result + {{observation}} + input_variables: + - "goal" + - "command" + - "command_args" + - "observation" + input_interface: + - "goal" + - "command" + - "command_args" + - "observation" +``` +`HumanFeedback`: + * `request_multi_line_input_flag`: This boolean parameter determines whether the user/human is prompted to enter a multi-line input (True) or a single-line input (False). + * `query_message_prompt_template`: This parameter involves a prompt template used to generate the message presented to the human. It includes: + * `template`: The prompt template in Jinja format. + * `input_variables` The input variables of the prompt. Note that these input variables have the same names as the placeholders "{{}}" in the `template`. Before querying the human, the template is rendered by placing the `input_variables` in the placeholders of the `template`. + * `input_interface`: Describes the expected input interface for the flow. It's noteworthy that the `input_interface` is identical to the `input_variables` of the `query_message_prompt_template`. This alignment is intentional, as they are passed as `input_variables` to the `query_message_prompt_template` to render the message presented to the user. + + +```yaml +topology: # The first two are the same as in the ControllerExecutorFlow + - goal: "Select the next action and prepare the input for the executor." 
+ input_interface: + _target_: aiflows.interfaces.KeyInterface + additional_transformations: + - _target_: aiflows.data_transformations.KeyMatchInput + flow: Controller + output_interface: + _target_: ControllerExecutorFlow.detect_finish_or_continue + reset: false + + - goal: "Execute the action specified by the Controller." + input_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + command: branch + command_args: branch_input_data + keys_to_select: ["branch", "branch_input_data"] + flow: Executor + output_interface: + _target_: aiflows.interfaces.KeyInterface + keys_to_rename: + branch_output_data: observation + keys_to_select: ["observation"] + reset: false + + - goal: "Ask the user for feedback." + input_interface: + _target_: aiflows.interfaces.KeyInterface + flow: HumanFeedback + output_interface: + _target_: ReActWithHumanFeedback.detect_finish_in_human_input + reset: false + +``` +The default topology of the `ControllerExecutorFlow` is overriden here: +* For more details on topology, refer to the tutorial [Composite Flow](./composite_flow.md). +* The topology of the `ControllerExecutorFlow`'s default config is available [here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerExecutorFlow.yaml#L36). +* Upon comparison with the default config's topology, one would observe that the sole alteration is the incorporation of the `HumanFeedbackFlow` to the circular flow. +* Note the significance of including the `detect_finish_in_human_input` function from the `ReActWithHumanFeedback` class in the output interface. This function, as defined earlier, plays a crucial role in initiating the process of terminating the flow if the human/user provides "q" as feedback. + +Now that our configuration file is set up, we can proceed to call our flow. Begin by configuring your API information. 
Below is an example using an OpenAI key, along with examples for other API providers (in comment): + +```python +# ~~~ Set the API information ~~~ +# OpenAI backend +api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] +# Azure backend +# api_information = ApiInfo(backend_used = "azure", +# api_base = os.getenv("AZURE_API_BASE"), +# api_key = os.getenv("AZURE_OPENAI_KEY"), +# api_version = os.getenv("AZURE_API_VERSION") ) +``` + +Next, load the YAML configuration, insert your API information, +and define the `flow_with_interfaces` dictionary: + +```python +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk +root_dir = "." +cfg_path = os.path.join(root_dir, "ReActWithHumanFeedback.yaml") +cfg = read_yaml_file(cfg_path) +# put the API information in the config +cfg["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information +flow = ReActWithHumanFeedback.instantiate_from_default_config(**cfg) + +# ~~~ Instantiate the Flow ~~~ +flow_with_interfaces = { + "flow": flow, + "input_interface": ( + None + if cfg.get("input_interface", None) is None + else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False) + ), + "output_interface": ( + None + if cfg.get("output_interface", None) is None + else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False) + ), +} +``` + +Finally, run the flow with `FlowLauncher`. 
+ +```python + data = { + "id": 0, + "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?", +} +# At first, we retrieve information about Michael Jordan the basketball player +# If we provide feedback, only in the first round, that we are not interested in the basketball player, +# but the statistician, and skip the feedback in the next rounds, we get the correct answer + +# ~~~ Run inference ~~~ +path_to_output_file = None +# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk + +_, outputs = FlowLauncher.launch( + flow_with_interfaces=flow_with_interfaces, + data=data, + path_to_output_file=path_to_output_file, +) + +# ~~~ Print the output ~~~ +flow_output_data = outputs[0] +print(flow_output_data) +``` + +The complete example is accessible [here](https://github.com/epfl-dlab/aiflows/tree/main/examples/ReActWithHumanFeedback/) and can be executed as follows: + +```bash +cd examples/ReActWithHumanFeedback +python run.py +``` + +Upon execution, you will be prompted for feedback on the Executor's answer. The interaction will resemble the following: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? + +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Jordan'} + +== Result +{'wiki_content': 'Michael Jeffrey Jordan (born February 17, 1963), also known by his initials MJ, is an American businessman and former professional basketball player. His profile on the official National Basketball Association (NBA) website states that "by acclamation, Michael Jordan is the greatest basketball player of all time." He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls. 
He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels. As a freshman, he was a member of the Tar Heels\' national championship team in 1982. Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game\'s best defensive players. His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames "Air Jordan" and "His Airness". Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat. Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season. He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards. During his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan\'s individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award. 
He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game). In 1999, he was named the 20th century\'s greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press\' list of athletes of the century. Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men\'s Olympic basketball team ("The Dream Team"). He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Ha'} + +[2023-12-06 09:30:40,844][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` + +You can respond with: + +``` +No I'm talking about Michael Irwin Jordan. I think he's a statistician. Maybe look him up on wikipedia? +``` + +Subsequently, ReAct will provide a response similar to this: +``` +Relevant information: +== Goal == +Answer the following question: What is the profession and date of birth of Michael Jordan? 
+ +== Last Command == +wiki_search + +== Args +{'search_term': 'Michael Irwin Jordan'} + +== Result +{'wiki_content': 'Michael Irwin Jordan (born February 25, 1956) is an American scientist, professor at the University of California, Berkeley and researcher in machine learning, statistics, and artificial intelligence.Jordan was elected a member of the National Academy of Engineering in 2010 for contributions to the foundations and applications of machine learning.\nHe is one of the leading figures in machine learning, and in 2016 Science reported him as the world\'s most influential computer scientist.In 2022, Jordan won the inaugural World Laureates Association Prize in Computer Science or Mathematics, "for fundamental contributions to the foundations of machine learning and its application."\n\n\n== Education ==\nJordan received his BS magna cum laude in Psychology in 1978 from the Louisiana State University, his MS in Mathematics in 1980 from Arizona State University and his PhD in Cognitive Science in 1985 from the University of California, San Diego. At the University of California, San Diego, Jordan was a student of David Rumelhart and a member of the Parallel Distributed Processing (PDP) Group in the 1980s.\n\n\n== Career and research ==\nJordan is the Pehong Chen Distinguished Professor at the University of California, Berkeley, where his appointment is split across EECS and Statistics. He was a professor at the Department of Brain and Cognitive Sciences at MIT from 1988 to 1998.In the 1980s Jordan started developing recurrent neural networks as a cognitive model. In recent years, his work is less driven from a cognitive perspective and more from the background of traditional statistics.\nJordan popularised Bayesian networks in the machine learning community and is known for pointing out links between machine learning and statistics. 
He was also prominent in the formalisation of variational methods for approximate inference and the popularisation of the expectation–maximization algorithm in machine learning.\n\n\n=== Resignation from Machine Learning ===\nIn 2001, Jordan and others resigned from the editorial board of the journal Machine Learning. In a public letter, they argued for less restrictive access and pledged support for a new open access journal, the Journal of Machine Learning Research, which was created by Leslie Kaelbling to support the evolution of the field of machine learning.\n\n\n=== Honors and awards ===\nJordan has received numerous awards, including a best student paper award (with X. Nguyen and M. Wainwright) at the International Conference on Machine Learning (ICML 2004), a best paper award (with R. Jacobs) at the American Control Conference (ACC 1991), the ACM-AAAI Allen Newell Award, the IEEE Neural Networks Pioneer Award, and an NSF Presidential Young Investigator Award. In 2002 he was named an AAAI Fellow "for significant contributions to reasoning under uncertainty, machine learning, and human motor control." In 2004 he was named an IMS Fellow "for contributions to graphical models and machine learning." In 2005 he was named an IEEE Fellow "for '} +[2023-12-06 09:53:52,058][aiflows.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow:126][INFO] - Please enter you single-line response and press enter. +``` +Your subsequent response could be: + +``` +There you go! I think you have it! +``` +Eventually, the flow should terminate and return something similar to: + +``` +[{'answer': 'Michael Jordan is a scientist, professor, and researcher in machine learning, statistics, and artificial intelligence. He was born on February 25, 1956.', 'status': 'finished'}] +``` + +Finally, it provides the correct answer! + + +Nevertheless, persisting with the use of `ReActWithHumanFeedback` may reveal an inherent challenge, particularly in prolonged conversations. 
The primary issue arises when attempting to pass the entire message history to the language model (LLM), eventually surpassing the maximum token limit allowable. As a workaround, we currently send only the first two and the last messages as context to the LLM. However, this approach is suboptimal if you desire your model to maintain a more comprehensive long-term memory. + +To address this limitation, we recommend exploring the subsequent tutorial, [AutoGPT Tutorial](./autogpt_tutorial.md). This tutorial introduces a fundamental implementation of AutoGPT, enhancing the ReAct flow by incorporating a Memory Flow. This addition tackles the challenge of managing longer conversations. + +___ + + +**Next Tutorial:** [AutoGPT Tutorial](./autogpt_tutorial.md) \ No newline at end of file diff --git a/docs/getting_started/Tutorial/setting_up_aiFlows.md b/docs/getting_started/Tutorial/setting_up_aiFlows.md new file mode 100644 index 0000000..79ca6c2 --- /dev/null +++ b/docs/getting_started/Tutorial/setting_up_aiFlows.md @@ -0,0 +1,96 @@ +# Setting up aiFlows +Welcome to a straightforward tutorial in which we walk you through a suggested setup that will provide you with a smooth and efficient workflow. + + +Let's dive right in. This document is a tutorial for setting up the following: + +1. [Section 1:](#section-1-installing-aiflows) Installing aiFlows +2. [Section 2:](#section-2-setting-up-the-flowverse) Setting Up The FlowVerse +3. [Section 3:](#section-3-setting-up-your-api-keys) Setting Up Your API Keys + + +### By the Tutorial's End, I Will Have... 
+* Installed the aiFlows library successfully +* Established an organized file structure for seamless collaboration within the FlowVerse +* Set up a Hugging Face account for contribution to the FlowVerse (Optional) +* Configured and activated my API keys + +## Section 1: Installing aiFlows +Begin the installation process for aiFlows with Python 3.10+ using: +```bash +pip install aiflows +``` +Alternatively, for a manual installation: + +```bash +git clone https://github.com/epfl-dlab/aiflows.git +cd aiflows +conda create --name flows python=3.10 +conda activate flows +pip install -e . +``` + +## Section 2: Setting Up The FlowVerse + +### Step 1: Setting up efficient Folder Structure +Create a dedicated folder for the FlowVerse, following our recommended structure: +```bash +mkdir FlowVerse +``` +Following the download of your initial Flows from the FlowVerse, your folder arrangement should look like this: +```bash +|-- YourProject +|-- flow_modules +| |-- Flow1 +| |-- Flow2 +| |-- ... +``` +This ensures all your Flows are conveniently centralized in a single place, simplifying management. + +### Step 2: Optional - Linking Hugging Face for FlowVerse Push + +To facilitate FlowVerse pushing, it's essential to link your Hugging Face account: +1. Begin by creating a [Hugging Face](https://huggingface.co/join) account and verifying your email. +2. Log in to Hugging Face in the terminal using: + * For terminal login, you'll need an access token. If you haven't already, [create one](https://huggingface.co/settings/tokens) (a read token should be sufficient) + * Enter the following command in the terminal, and when prompted, paste your access token: + ``` + huggingface-cli login + ``` + +This process is essential for the smooth integration of Hugging Face with FlowVerse, ensuring effortless pushing. + +## Section 3: Setting Up Your API Keys + +In this final step, let's configure your API keys as environment variables for your conda environment. 
We'll demonstrate how to set up keys for both OpenAI and Azure. Note that, thanks to LiteLLM, a variety of providers are available—explore them here: https://docs.litellm.ai/docs/providers + +* If you're using openAI: + * write in your terminal: + ``` + conda env config vars set OPENAI_API_KEY= + ``` + * reactivate your conda environment: + ``` + conda activate + ``` + * To make sure that your key has been set as an environment variable (your environment variables should appear): + ``` + conda env config vars list + ``` +* If you're using Azure: + * write in your terminal: + ``` + conda env config vars set AZURE_OPENAI_KEY= + conda env config vars set AZURE_API_BASE= + conda env config vars set AZURE_API_VERSION= + ``` + * reactivate your conda environment: + ``` + conda activate + ``` + * To make sure that your key has been set as an environment variable (your environment variables should appear): + ``` + conda env config vars list + ``` + +Congratulations! You are now equipped to seamlessly work with aiFlows. Happy flowing! diff --git a/docs/getting_started/Tutorial/tutorial_landing_page.md b/docs/getting_started/Tutorial/tutorial_landing_page.md new file mode 100644 index 0000000..19aefb9 --- /dev/null +++ b/docs/getting_started/Tutorial/tutorial_landing_page.md @@ -0,0 +1,67 @@ +# Tutorials + +Welcome to the exciting world of aiFlows! 🚀 These tutorials are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, **we recommend following the tutorials in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path. + +Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows. + +Let's dive in and explore the following tutorials ! + +## [1. 
Setting up aiFlows](./setting_up_aiFlows.md) +#### By the Tutorial's End, I Will Have... +* Installed the aiFlows library successfully +* Established an organized file structure for seamless collaboration within the FlowVerse +* Set up a Hugging Face account for contribution to the FlowVerse (Optional) +* Configured and activated my API keys + +## [2. Atomic Flow Tutorial](./atomic_flow.md) + +#### By the Tutorial's End, I Will Have... + +* Gained insight into the relationship among a Flow, an input interface, and an output interface +* Acquired hands-on experience in creating an `AtomicFlow` with the example of `ReverseNumberAtomic` +* Learned how to run a flow with a `FlowLauncher` + +## [3. Composite Flow Tutorial](./composite_flow.md) +#### By the Tutorial's End, I Will Have... + +* Gained insights into the concept of a Composite Flow +* Acquired the skills to create a `SequentialFlow` through a toy example +* Developed an understanding of the utility of input and output interfaces in connecting subflows within the flow structure + +## [4. Introducing the FlowVerse with a Simple Q&A Flow](./intro_to_FlowVerse_minimalQA.md) + +#### By the Tutorial's End, I Will Have... + +* Gained an understanding of the FlowVerse and its significance +* Acquired the skills to retrieve flows from the FlowVerse +* Successfully developed my initial flow by incorporating a FlowVerse flow +* Created a Simple Q&A flow capable of managing user-assistant interactions with a Language Model (LLM) through an API +* Familiarized myself with the fundamental parameters of the `ChatAtomicFlow` + +## [5. ReAct Tutorial](./reAct.md) + +#### By the Tutorial's End, I Will Have... + +* Gained an understanding of the ReAct flow and its significance +* Acquired the skills to pull multiple flows from the FlowVerse with external library dependencies +* Successfully developed my first personalized ReAct flow +* Familiarized myself with the essential parameters of the `ControllerExecutorFlow` + +## [6. 
ReAct With Human Feedback Tutorial](./reActwHumanFeedback.md)
+
+#### By the Tutorial's End, I Will Have...
+
+* Recognized the distinctions between ReAct and ReActWithHumanFeedback and their consequences
+* Learned how to integrate a human feedback flow into ReAct
+* Incorporated customized functions into the input and output interfaces.
+* Grasped the limitations of ReAct, particularly its lack of long-term memory
+* Deepened my understanding of the key parameters in the `ControllerExecutorFlow` configuration
+
+
+## [7. AutoGPT Tutorial](./autogpt_tutorial.md)
+
+#### By the Tutorial's End, I Will Have...
+
+* Acknowledged the differences between AutoGPT and ReActWithHumanFeedback and their implications
+* Gained proficiency in executing the AutoGPTFlow
+* Enhanced comprehension of intricate flow structures
\ No newline at end of file
diff --git a/docs/getting_started/detailed_examples/autogpt.md b/docs/getting_started/detailed_examples/autogpt.md
new file mode 100644
index 0000000..29fbc24
--- /dev/null
+++ b/docs/getting_started/detailed_examples/autogpt.md
@@ -0,0 +1,312 @@
+# AutoGPT
+
+## Definition
+
+The `AutoGPT` flow is a circular flow that organizes the problem-solving process into four distinct subflows:
+
+1. `ControllerFlow`: Given a goal and observations (from past executions), it selects from a predefined set of actions, which are explicitly defined in the `ExecutorFlow`, the next action it should execute to get closer to accomplishing its goal. In our configuration, we implement the `ControllerFlow` using the `ChatAtomicFlow`
+
+2. `ExecutorFlow`: Following the action selection by the `ControllerFlow`, the process moves to the `ExecutorFlow`. This is a branching flow that encompasses a set of subflows, with each subflow dedicated to a specific action. The `ExecutorFlow` executes the particular subflow associated with the action chosen by the `ControllerFlow`. 
In our setup, the `ExecutorFlow` includes the following individual flows:
+ * `WikiSearchAtomicFlow`: This flow, given a "search term," executes a Wikipedia search and returns content related to the search term.
+ * `LCToolFlow` using `DuckDuckGoSearchRun`: This flow, given a "query," queries the DuckDuckGo search API and retrieves content related to the query.
+
+3. `HumanFeedbackFlow`: This flow prompts the user for feedback on the latest execution of the `ExecutorFlow`. The collected feedback is then conveyed back to the `ControllerFlow` to be considered in the subsequent execution step. Additionally, the flow is designed to have the capability to terminate the `AutoGPTFlow` if the user expresses such a preference.
+
+4. `MemoryFlow`: This flow is used to read and write memories of past conversations in a database. These memories can be passed to the `ControllerFlow`, enabling it to have a long-term memory without having to transmit the entire message history to the language model (LLM). It's implemented with the `VectorStoreFlow`
+
+## Topology
+
+The sequence of execution for `AutoGPT`'s flows is circular and follows this specific order:
+
+1. The `MemoryFlow` retrieves relevant information from memory
+2. The `ControllerFlow` selects the next action to execute and prepares the input for the `ExecutorFlow`
+3. The `ExecutorFlow` executes the action specified by the `ControllerFlow`
+4. The `HumanFeedbackFlow` asks the user for feedback
+5. The `MemoryFlow` writes relevant information to memory
+
+Here's a broad overview of the `AutoGPTFlow`:
+
+```
+| -------> Memory Flow -------> Controller Flow ------->|
+^ |
+| |
+| v
+| <----- HumanFeedback Flow <------- Executor Flow <----|
+```
+
+
+
+## Subflows
+
+### Memory Flow
+
+We utilize the `ChromaDBFlow` from the [VectorStoreFlowModule](https://huggingface.co/aiflows/VectorStoreFlowModule) as the `MemoryFlow`. 
For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VectorStoreFlowModule) for an extensive description of its parameters. + +Like every flow, when `ChromaDBFlow`'s `run` is called function is called: + +```python +def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. It runs the ChromaDBFlow. It either writes or reads memories from the database. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The output data of the flow. + :rtype: Dict[str, Any] + """ + api_information = self.backend.get_key() + + if api_information.backend_used == "openai": + embeddings = OpenAIEmbeddings(openai_api_key=api_information.api_key) + else: + # ToDo: Add support for Azure + embeddings = OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY")) + response = {} + + operation = input_data["operation"] + if operation not in ["write", "read"]: + raise ValueError(f"Operation '{operation}' not supported") + + content = input_data["content"] + if operation == "read": + if not isinstance(content, str): + raise ValueError(f"content(query) must be a string during read, got {type(content)}: {content}") + if content == "": + response["retrieved"] = [[""]] + return response + query = content + query_result = self.collection.query( + query_embeddings=embeddings.embed_query(query), + n_results=self.flow_config["n_results"] + ) + + response["retrieved"] = [doc for doc in query_result["documents"]] + + elif operation == "write": + if content != "": + if not isinstance(content, list): + content = [content] + documents = content + self.collection.add( + ids=[str(uuid.uuid4()) for _ in range(len(documents))], + embeddings=embeddings.embed_documents(documents), + documents=documents + ) + response["retrieved"] = "" + + return response +``` +One can notice that `ChromaDBFlow` acts as an encapsulation for chromadb's vector store-backend memory, which offers support 
for two types of operations: + +- `read`: This operation involves retrieving the `n_results` most relevant documents from ChromaDB based on the provided `content`. +- `write`: This operation is utilised to add the given `content` to VectorDB. + +#### Additional Documentation: + +* To delve into the extensive documentation for `ChromaDBFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/VectorStoreFlowModule) +* Find `ChromaDBFlow`'s default [configuration here](https://huggingface.co/aiflows/VectorStoreFlowModule/blob/main/ChromaDBFlow.yaml) +* For more information on the `chromadb` library, explore its [documentation](https://docs.trychroma.com/) + + + + +### ControllerFlow + +We utilize the `ControllerAtomicFlow` from the [ControllerExecutorFlowModule ](https://huggingface.co/aiflows/ControllerExecutorFlowModule) as the `ControllerFlow`. For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/ControllerExecutorFlowModule) for an extensive description of its parameters. + +`ControllerAtomicFlow`'s `run` function looks like this: + +```python +def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. Note that the response of the LLM is in the JSON format, but it's not a hard constraint (it can hallucinate and return an invalid JSON) + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The output data of the flow (thought, reasoning, criticism, command, command_args) + :rtype: Dict[str, Any] + """ + api_output = super().run(input_data)["api_output"].strip() + response = json.loads(api_output) + return response +``` + +The `run` function is a straightforward wrapper around [ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule). The Language Model (LLM) responds in JSON format, but this isn't strictly enforced—it may occasionally return an invalid JSON. 
The soft constraint is set by the system prompt, detailed in [its default configuration](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerAtomicFlow.yaml). This configuration specifies the expected output format and describes the available commands it has access to (these are the subflows of the `ExecutorFlow`). The system prompt template is as follows: + +```yaml +system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are a smart AI assistant. + + Your decisions must always be made independently without seeking user assistance. + Play to your strengths as an LLM and pursue simple strategies with no legal complications. + If you have completed all your tasks, make sure to use the "finish" command. + + Constraints: + 1. No user assistance + 2. Exclusively use the commands listed in double quotes e.g. "command name" + + Available commands: + {{commands}} + + Resources: + 1. Internet access for searches and information gathering. + 2. Long Term memory management. + + Performance Evaluation: + 1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities. + 2. Constructively self-criticize your big-picture behavior constantly. + 3. Reflect on past decisions and strategies to refine your approach. + 4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps. 
+ You should only respond in JSON format as described below + Response Format: + { + "thought": "thought", + "reasoning": "reasoning", + "plan": "- short bulleted\n- list that conveys\n- long-term plan", + "criticism": "constructive self-criticism", + "speak": "thoughts summary to say to user", + "command": "command name", + "command_args": { + "arg name": "value" + } + } + Ensure your responses can be parsed by Python json.loads +input_variables: ["commands"] +``` +Where "{{commands}}" is the placeholder for the available commands which are added to the template when the `ControllerAtomicFlow` is being instantiated. + +The goal and observations (from past executions) are passed via the `human_message_prompt` and the `init_human_message_prompt` who are the following: +```yaml +human_message_prompt_template: + template: |2 + Potentially relevant information retrieved from your memory: + {{memory}} + ================= + Here is the response to your last action: + {{observation}} + Here is the feedback from the user: + {{human_feedback}} + input_variables: + - "observation" + - "human_feedback" + - "memory" +input_interface_initialized: + - "observation" + - "human_feedback" + - "memory" +``` + +#### Additional Documentation: + +* To delve into the extensive documentation for `ControllerAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/ControllerExecutorFlowModule) +* Find `ControllerAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/ControllerExecutorFlowModule/blob/main/ControllerAtomicFlow.yaml) + + +### ExecutorFlow + +We utilize a [BranchingFlow](https://github.com/epfl-dlab/aiflows/blob/main/aiflows/base_flows/branching.py) from aiFlow's codebase as the `ExecutorFlow`. The `ExecutorFlow` by default has two subflows which are the available commands the `ControllerFlow` can call: + +#### 1. The LCToolFlow + +The `LCToolFlow` is an atomic flow functioning as an interface for LangChain tools. 
This flow operates by taking a `tool_input`, which corresponds to the tool's keyword arguments, as its input, and then provides the observation as its output. + +```python + def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]: + """ This method runs the flow. It runs the backend on the input data. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The output data of the flow. + :rtype: Dict[str, Any] + """ + observation = self.backend.run(tool_input=input_data) + + return {"observation": observation} +``` + +Using a tool with the `LCToolFlow` is a straightforward process. By setting the desired tool as the backend's `_target_`, you can seamlessly integrate it into your workflow. For a comprehensive list of compatible tools, please refer to the Integrations section in [LangChain's Tool documentation](https://python.langchain.com/docs/modules/agents/tools/). + +```yaml +- _target_: flow_modules.aiflows.LCToolFlowModule.LCToolFlow.instantiate_from_default_config + overrides: + name: "ddg_search" + backend: + _target_: langchain.tools.DuckDuckGoSearchRun +``` + +#### 2. The WikiSearchAtomicFlow + +The `WikiSearchAtomicFlow` is also atomic flow and functions as an interface for Wikipedia's API. Given a `search_term`, it can execute a search on wikipedia and fetch page summaries to eventually pass it back to the `ControllerFlow` +```python +def run(self, + input_data: Dict[str, Any]) -> Dict[str, Any]: + """ Runs the WikiSearch Atomic Flow. It's used to execute a Wikipedia search and get page summaries. 
+ + :param input_data: The input data dictionary + :type input_data: Dict[str, Any] + :return: The output data dictionary + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + term = input_data.get("search_term", None) + api_wrapper = WikipediaAPIWrapper( + lang=self.flow_config["lang"], + top_k_results=self.flow_config["top_k_results"], + doc_content_chars_max=self.flow_config["doc_content_chars_max"] + ) + + # ~~~ Call ~~~ + if page_content := api_wrapper._fetch_page(term): + search_response = {"wiki_content": page_content, "relevant_pages": None} + else: + page_titles = api_wrapper.search_page_titles(term) + search_response = {"wiki_content": None, "relevant_pages": f"Could not find [{term}]. similar: {page_titles}"} + + # Log the update to the flow messages list + observation = search_response["wiki_content"] if search_response["wiki_content"] else search_response["relevant_pages"] + return {"wiki_content": observation} +``` + +#### Additional Documentation: + +* Refer to [LCToolFlow's FlowCard](https://huggingface.co/aiflows/LCToolFlowModule) and [WikiSearchAtomicFlow's FlowCard](https://huggingface.co/aiflows/ControllerExecutorFlowModule) for further documentation + + +### Human Feedback Flow + +We utilize the `HumanStandadInputFlow` from the [HumanStandardInputFlowModule ](https://huggingface.co/aiflows/HumanStandardInputFlowModule) as the `HumanFeedbackFlow`. For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/HumanStandardInputFlowModule) for an extensive description of its parameters. + +Its `run` function enables users to provide feedback at the conclusion of each iteration. This feedback is subsequently appended to the observation generated by the `ExecutorFlow`. By doing so, the feedback becomes part of the memory, thereby influencing the agent's decision-making process. + +```python +def run(self, + input_data: Dict[str, Any]) -> Dict[str, Any]: + """ Runs the HumanStandardInputFlow. 
It's used to read input from the user/human's standard input. + + :param input_data: The input data dictionary + :type input_data: Dict[str, Any] + :return: The output data dictionary + :rtype: Dict[str, Any] + """ + + query_message = self._get_message(self.query_message_prompt_template, input_data) + state_update_message = UpdateMessage_Generic( + created_by=self.flow_config['name'], + updated_flow=self.flow_config["name"], + data={"query_message": query_message}, + ) + self._log_message(state_update_message) + + log.info(query_message) + human_input = self._read_input() + + return {"human_input": human_input} +``` + +In the current context, if the user enters the command `q`, the flow triggers an early exit by setting the early exit key to `True`, which leads to the termination of the Flow. + +#### Additional Documentation: + +* To delve into the extensive documentation for `HumanStandardInputFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/HumanStandardInputFlow) + diff --git a/docs/getting_started/detailed_examples/chat_flow.md b/docs/getting_started/detailed_examples/chat_flow.md new file mode 100644 index 0000000..8ad87ff --- /dev/null +++ b/docs/getting_started/detailed_examples/chat_flow.md @@ -0,0 +1,95 @@ +# ChatAtomicFlow + +## Definition + +The `ChatAtomicFlow` is a flow that seamlessly interfaces with an LLM through an API, generating textual responses for textual inputs. Powered by the LiteLLM library in the backend, `ChatAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers). For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VectorStoreFlowModule) for an extensive description of its parameters. + +## Methods + + +In this section, we'll explore some o `ChatAtomicFlow`'s methods, specifically those invoked when it is called. 
+ +Just like every flow, `ChatAtomicFlow` is called via the `run` method: + +```python +def run(self,input_data: Dict[str, Any]): + """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The LLM's api output. + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + self._process_input(input_data) + + # ~~~ Call ~~~ + response = self._call() + + #loop is in case there was more than one answer (n>1 in generation parameters) + for answer in response: + self._state_update_add_chat_message( + role=self.flow_config["assistant_name"], + content=answer + ) + response = response if len(response) > 1 or len(response) == 0 else response[0] + return {"api_output": response} +``` + +As you can see in the code snippet here above, `run` processes the input data of the flow via the `_process_input` method. Let's take a closer look at what it does: + + +```python +def _process_input(self, input_data: Dict[str, Any]): + """ This method processes the input of the flow. It adds the human message to the flow's state. If the conversation is not initialized, it also initializes it + (adding the system message and potentially the demonstrations). + + :param input_data: The input data of the flow. 
+ :type input_data: Dict[str, Any] + """ + if self._is_conversation_initialized(): + # Construct the message using the human message prompt template + user_message_content = self._get_message(self.human_message_prompt_template, input_data) + + else: + # Initialize the conversation (add the system message, and potentially the demonstrations) + self._initialize_conversation(input_data) + if getattr(self, "init_human_message_prompt_template", None) is not None: + # Construct the message using the query message prompt template + user_message_content = self._get_message(self.init_human_message_prompt_template, input_data) + else: + user_message_content = self._get_message(self.human_message_prompt_template, input_data) + + self._state_update_add_chat_message(role=self.flow_config["user_name"], + content=user_message_content) +``` +This function prepares the user message prompt for submission to the Language Model (LLM) by inserting the `input_data` into the placeholders of the user prompt template (details of which will be explained later). The choice of user prompt sent to the LLM depends on whether the conversation has been initiated or not (i.e., whether the flow has been called): + +- If the conversation has not been initialized, the message is constructed using the `init_human_message_prompt_template`. In this case, the expected input interface for the flow is specified by `input_interface_non_initialized`. + +- If the conversation has been initialized, the message is constructed using the `human_message_prompt_template`. In this case, the expected input interface for the flow is specified by `input_interface_initialized`. + +This distinction proves useful when different inputs are needed for the initial query compared to subsequent queries to the flow. For example, in ReAct, the first query to the LLM is initiated by a human, such as asking a question. In subsequent queries, the input is derived from the execution of a tool (e.g., a query to Wikipedia). 
In ReAct's implementation, these two scenarios are differentiated by ChatAtomicFlow's `input_interface_non_initialized` and `input_interface_initialized`, which define the input interface for the flow. + +[ChatAtomicFlow's default configuration](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) defines user prompt templates as so: +```yaml +init_human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + +human_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: "{{query}}" + input_variables: + - "query" +input_interface_initialized: + - "query" +``` +This signifies that `init_human_message_prompt_template` represents an empty string message, while the rendered message for `human_message_prompt_template` is derived from the previous flow's query. This is achieved by placing the input variable "query" (from `input_dict`) into the `{{query}}` placeholder of the prompt template. + +Finally, the `run` function calls the LLM via the LiteLLM library, saves the message in it's flow state and sends the output to the next flow. + +**Additional Documentation:** + +* To delve into the extensive documentation for `ChatAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/ChatFlowModule) +* Find `ChatAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) \ No newline at end of file diff --git a/docs/getting_started/detailed_examples/detailed_example_landing_page.md b/docs/getting_started/detailed_examples/detailed_example_landing_page.md new file mode 100644 index 0000000..50e8422 --- /dev/null +++ b/docs/getting_started/detailed_examples/detailed_example_landing_page.md @@ -0,0 +1,42 @@ +# Detailed Examples + +Welcome to the exciting world of aiFlows! 🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. 
To make the most of your learning experience, **we recommend following the guides in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path.
+
+Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows.
+
+Let's dive in and explore the following guides!
+
+## [1. ChatAtomicFlow](./chat_flow.md)
+
+#### By the Guide's End, I Will Have...
+
+* Gained insight into the execution flow of `ChatAtomicFlow`
+
+* Acquired an understanding of how `ChatAtomicFlow` processes inputs
+
+* Identified the documentation resource for `ChatAtomicFlow`
+
+
+## [2. VisionAtomicFlow](./vision_flow.md)
+
+#### By the Guide's End, I Will Have...
+
+* Gained insight into the execution flow of `VisionAtomicFlow`
+
+* Acquired an understanding of how `VisionAtomicFlow` processes inputs
+
+* Recognized the similarities between `VisionAtomicFlow` and `ChatAtomicFlow`
+
+* Identified the documentation resource for `VisionAtomicFlow`
+
+## [3. AutoGPTFlow](./autogpt.md)
+
+Note: This guide is also useful if you're interested in ReAct since the two share lots of similarities
+
+#### By the Guide's End, I Will Have...
+
+* Understood the purpose of `AutoGPTFlow`
+
+* Explored the functionalities of `AutoGPT`'s subflows
+
+* Identified the documentation resource for `AutoGPTFlow` and its subflows
diff --git a/docs/getting_started/detailed_examples/vision_flow.md b/docs/getting_started/detailed_examples/vision_flow.md
new file mode 100644
index 0000000..7b71a96
--- /dev/null
+++ b/docs/getting_started/detailed_examples/vision_flow.md
@@ -0,0 +1,106 @@
+# Vision Atomic Flow
+**Prerequisite**: [Chat Atomic Flow](./chat_flow.md)
+
+## Definition
+
+The `VisionAtomicFlow` is a flow that seamlessly interfaces with an LLM through an API. 
It is a flow that, given a textual input, and a set of images and/or videos, generates a textual output. Powered by the LiteLLM library in the backend, `VisionAtomicFlow` supports various API providers; explore the full list [here](https://docs.litellm.ai/docs/providers). For a detailed understanding of its parameters, refer to its [`FlowCard`](https://huggingface.co/aiflows/VisionFlowModule) for an extensive description of its parameters. + +## Methods + +In this section, we'll delve into some of the methods within the `VisionAtomicFlow` class, specifically those invoked when it is called. + +If you examine the [`VisionAtomicFlow` class](https://huggingface.co/aiflows/VisionFlowModule/blob/main/VisionAtomicFlow.py), you'll observe the following: + +1. It's a class that inherits from the `ChatAtomicFlow`. +2. There is no `run` method explicitly defined, and as a result, it shares the same `run` method as `ChatAtomicFlow`, which is the method always called when a flow is invoked. + +Here is the run method of VisionAtomicFlow: +```python +def run(self,input_data: Dict[str, Any]): + """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow. + + :param input_data: The input data of the flow. + :type input_data: Dict[str, Any] + :return: The LLM's api output. + :rtype: Dict[str, Any] + """ + + # ~~~ Process input ~~~ + self._process_input(input_data) + + # ~~~ Call ~~~ + response = self._call() + + #loop is in case there was more than one answer (n>1 in generation parameters) + for answer in response: + self._state_update_add_chat_message( + role=self.flow_config["assistant_name"], + content=answer + ) + response = response if len(response) > 1 or len(response) == 0 else response[0] + return {"api_output": response} +``` + +In the provided code snippet, observe that the `run` method handles the input data of the flow through the `_process_input` method. 
Let's delve into a closer examination of its functionality: + + +```python +def _process_input(self, input_data: Dict[str, Any]): + """ This method processes the input data (prepares the messages to send to the API). + + :param input_data: The input data. + :type input_data: Dict[str, Any] + :return: The processed input data. + :rtype: Dict[str, Any] + """ + if self._is_conversation_initialized(): + # Construct the message using the human message prompt template + user_message_content = self.get_user_message(self.human_message_prompt_template, input_data) + + else: + # Initialize the conversation (add the system message, and potentially the demonstrations) + self._initialize_conversation(input_data) + if getattr(self, "init_human_message_prompt_template", None) is not None: + # Construct the message using the query message prompt template + user_message_content = self.get_user_message(self.init_human_message_prompt_template, input_data) + else: + user_message_content = self.get_user_message(self.human_message_prompt_template, input_data) + + self._state_update_add_chat_message(role=self.flow_config["user_name"], + content=user_message_content) +``` + + +When calling `_process_input(input_data)` in `VisionAtomicFlow`, the flow generates its user message prompt similarly to `ChatAtomicFlow` (refer to [ChatAtomicFlow's detailed example](./chat_flow.md)). However, due to a slight modification in the `get_user_message` method compared to `ChatAtomicFlow`, it also includes one or multiple images or videos in the input. + +```python + @staticmethod + def get_user_message(prompt_template, input_data: Dict[str, Any]): + """ This method constructs the user message to be passed to the API. + + :param prompt_template: The prompt template to use. + :type prompt_template: PromptTemplate + :param input_data: The input data. + :type input_data: Dict[str, Any] + :return: The constructed user message (images , videos and text). 
+ :rtype: Dict[str, Any] + """ + content = VisionAtomicFlow._get_message(prompt_template=prompt_template,input_data=input_data) + media_data = input_data["data"] + if "video" in media_data: + content = [ content[0], *VisionAtomicFlow.get_video(media_data["video"])] + if "images" in media_data: + images = [VisionAtomicFlow.get_image(image) for image in media_data["images"]] + content.extend(images) + return content +``` + +Note that images can be passed either via a URL (an image on the internet) or by providing the path to a local image. However, videos must be local videos. + + +Finally, the `run` function calls the LLM via the LiteLLM library, saves the message in it's flow state and sends the textual output to the next flow. + +**Additional Documentation:** + +* To delve into the extensive documentation for `VisionAtomicFlow`, refer to its [FlowCard on the FlowVerse](https://huggingface.co/aiflows/VisionFlowModule) +* Find `ChatAtomicFlow`'s default [configuration here](https://huggingface.co/aiflows/VisionFlowModule/blob/main/demo.yaml) \ No newline at end of file diff --git a/docs/getting_started/developer_guide/developper_guide_landing_page.md b/docs/getting_started/developer_guide/developper_guide_landing_page.md new file mode 100644 index 0000000..bcc5a71 --- /dev/null +++ b/docs/getting_started/developer_guide/developper_guide_landing_page.md @@ -0,0 +1,27 @@ +# Developer's Guide + +Welcome to the exciting world of aiFlows! 🚀 These guides are your gateway to mastering the main concepts of aiFlows and the FlowVerse through hands-on examples. To make the most of your learning experience, **we recommend following the tutorials in the given order**. Each tutorial builds on the concepts introduced in the previous one, providing a structured and comprehensive learning path. + +Get ready for an engaging journey where you'll build practical skills and gain a deeper understanding of the power and versatility of aiFlows. 
+ +Let's dive in and explore the following guides ! + +## [1. Flow Module Management](./flow_module_management.md) + +#### By the Tutorial's End, I Will Have... + +* Gained a clear understanding of pulling flows from the FlowVerse. + +* Mastered the handling of flows that depend on other flows. + +## [2. Typical Developer Workflows](./typical_developer_workflows.md) + +#### By the Tutorial's End, I Will Have... + +* Learned how to Create a Flow + +* Learned how to Test a Flow + +* Learned how to Publish a Flow + +* Learned how to contributing to an existing flow \ No newline at end of file diff --git a/docs/getting_started/developer_guide/flow_module_management.md b/docs/getting_started/developer_guide/flow_module_management.md new file mode 100644 index 0000000..6946486 --- /dev/null +++ b/docs/getting_started/developer_guide/flow_module_management.md @@ -0,0 +1,81 @@ +# Flow Module Management + +### By the Tutorial's End, I Will Have... + +* Gained a clear understanding of pulling flows from the FlowVerse. + +* Mastered the handling of flows that depend on other flows. + +## Introduction + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, these Flows can be readily downloaded, used, extended or composed into novel, more complex Flows. For the ones using ChatGPT, you could think of them as open-source GPTs(++). + +In the heart of this platform, the community shares their unique Flows, encapsulated in what we call **flow modules**. + +## Flow Modules + +- Each Hugging Face published repository corresponds to a self-contained flow module. For instance, [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule) is a flow module. +- A module may include multiple Flow classes and potentially a default configuration YAML file. 
In the [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule) module, you can find [ChatHumanFlowModule.py](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule/blob/main/ChatHumanFlowModule.py). +- Each Flow class can depend on other remote, publicly available modules. For example, [ChatHumanFlowModule.py](https://huggingface.co/aiflows/ChatInteractiveFlowModule/blob/main/ChatHumanFlowModule.py) depends on [aiflows/ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule). + +## Syncing Flow Modules + +To use or import a flow module, first sync it to the `flow_modules` directory in your root directory. You can then import it like any local Python package. Consider the following `trivial_sync_demo.py`, which relies on [nbaldwin/ChatFlows](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule): + +```python +dependencies = [ + {"url": "nbaldwin/ChatInteractiveFlowModule", "revision": "main"}, +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.nbaldwin.ChatInteractiveFlowModule import ChatHumanFlowModule + +if __name__ == "__main__": + print("This is a trivial sync demo.") +``` + +This synchronization process, though it may seem unconventional at first, provides a number of advantages: +* The synchronization process examines the implementation of remote flow modules seamlessly, eliminating the need to switch between your integrated development * environment (IDE) and a web page. +* It extends existing implementations effortlessly without the requirement to download or clone the repository manually. + +## Flow Module Namespace + +* Remote flow modules are identified by their Hugging Face repository ID and revision, such as `nbaldwin/ChatInteractiveFlowModule:main`. +* Each locally synchronized flow module manifests as a valid Python package within the `flow_modules` directory, exemplified by structures like `flow_modules.nbaldwin.ChatInteractiveFlowModule`. 
Importantly, only one revision is retained for each remote flow module, a practice upheld to ensure clarity and manage revision conflicts. Should a conflict arise, a warning will guide you to select the preferred version. + +For a visual representation, consider the following directory structure: + +```shell +(aiflows) ➜ dev-tutorial tree . +. +├── flow_modules +│ ├── aiflows +│ │ └── ChatFlowModule +│ │ ├── ... +│ │ ├── ChatAtomicFlow.py +│ │ ├── ChatAtomicFlow.yaml +│ │ ├── ... +│ │ ├── ... +│ │ └── __pycache__ +│ │ ├── ChatAtomicFlow.cpython-39.pyc +│ │ └── __init__.cpython-39.pyc +│ └── nbaldwin +│ └── ChatInteractiveFlowModule +│ ├── ... +│ ├── ChatHumanFlowModule.py +│ ├── ChatHumanFlowModule.yaml +│ ├── README.md +│ ├── ... +│ └── __pycache__ +│ ├── ChatHumanFlowModule.cpython-39.pyc +│ └── __init__.cpython-39.pyc +└── trivial_sync_demo.py + +9 directories, 16 files +``` +In this illustration, the `nbaldwin/ChatInteractiveFlowModule` flow module relies on the remote flow module `aiflows/ChatAtomicFlow`. Both dependencies are seamlessly synchronized under the flow_modules directory. The synchronization and importation of dependencies mirror each other, ensuring a consistent and logical approach across remote and local development environments. + +____ + +**Next Tutorial**: [Typical Developer Workflows](./typical_developer_workflows.md) \ No newline at end of file diff --git a/docs/getting_started/developer_guide/typical_developer_workflows.md b/docs/getting_started/developer_guide/typical_developer_workflows.md new file mode 100644 index 0000000..ca0f41e --- /dev/null +++ b/docs/getting_started/developer_guide/typical_developer_workflows.md @@ -0,0 +1,252 @@ +# Typical Developer Workflows +**prerequisites**: [Flow Module Management](./flow_module_management.md) + +## Creating, Testing, and Publishing Your Own Flow Module + +### By the Tutorial's End, I Will Have... 
+ +* Learned how to Create a Flow + +* Learned how to Test a Flow + +* Learned how to Publish a Flow + +* Learned how to contribute to an existing flow + + +### Creating Your Own Flow Module + +To start, create a local directory where you'll develop your flow module: + +```shell +(aiflows) ➜ dev-tutorial mkdir PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots touch __init__.py +(aiflows) ➜ dev_UsefulChatBots touch .gitignore +(aiflows) ➜ dev_UsefulChatBots touch EconomicExpertBot.py +(aiflows) ➜ dev_UsefulChatBots git init +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git add . +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git commit -m "initial commit" +[main (root-commit) e592fd1] initial commit +3 files changed, 0 insertions(+), 0 deletions(-) +create mode 100644 .gitignore +create mode 100644 EconomicExpertBot.py +create mode 100644 __init__.py +``` + +Next, we could either develop from scratch as in [Tutorial for AtomicFlow](../Tutorial/atomic_flow.md) or we could leverage an existing flow module and build upon it. In this tutorial, we'll develop our chatbot based on [aiflows/ChatFlowModule](https://huggingface.co/aiflows/ChatFlowModule) thanks to the modularity of Flows: + +```python +dependencies = [ + {"url": "aiflows/ChatFlowModule", "revision": "main"}, +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow + +class EconomicExpertBot(ChatAtomicFlow): + def __init__(self, **kwargs): + super().__init__(**kwargs) +``` + +We recommend associating your flow with a default yaml file as the default config. This default config will serve as a clear spec of the Flow class. For example, in our case: +```yaml +name: "EconomicExpertBot" +description: "A chatbot which answers questions about the economy."
+ +input_interface: + - "query" + +output_interface: + - "response" + +system_message_prompt_template: + _target_: aiflows.prompt_template.JinjaPrompt + template: |2- + You are an expertise in finance, economy and investment. When you explain something, you always provide associated statistical numbers, source of the information and concrete examples. You tend to explain things in a step-by-step fashion to help the reader to understand. You are also proficient in both English and Chinese. You can answer questions fluently in both languages. + + input_variables: [] +``` + +This explicitly informs potential users about the `input_interface` and `output_interface`, which can be seen as the interface of our Flow. Since we're inheriting from `aiflows/ChatFlowModule.ChatAtomicFlow`, we also inherit the [default config](https://huggingface.co/aiflows/ChatFlowModule/blob/main/ChatAtomicFlow.yaml) from it. Therefore, our default config can be succinct and only needs to tweak some essential parameters. + +Note that a flow module should ideally be a self-contained python module. Therefore, it's best to use relative import inside your code such that other users can use your flow instantly. + +### Testing Your Own Flow Module + +So far so good, we have created our own flow. 
+ Let's now try to test it: + +```python +dependencies = [ + {"url": "yeeef/UsefulChatBots", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots"}, +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +import os + +from flow_modules.yeeef.UsefulChatBots.EconomicExpertBot import EconomicExpertBot +from aiflows.flow_launchers import FlowLauncher +from aiflows.backends.api_info import ApiInfo + + +if __name__ == "__main__": + # ~~~ Set the API information ~~~ + # OpenAI backend + + api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))] + + overrides = { "backend": {"api_infos": api_information}} + + bot = EconomicExpertBot.instantiate_from_default_config(**overrides) + # the data points in inputs must satisfy the requirements of input_keys + data = [ + { + "id": 0, "query": "What is CPI? What is the current CPI in the US?", + }, + ] + print(f"inputs: {data}") + + # init a minimal flow_launcher without specifying the output_keys, then + # the full output_keys will be given + outputs = FlowLauncher.launch( + flow_with_interfaces={"flow": bot}, + data=data, + ) + print(outputs) +``` + +As we are developing locally, the remote revision does not exist yet, so we point the revision to the local path we just created: `PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots`. Note that when we sync a local revision, instead of copying the files locally, we make a symbolic soft link. So you could just modify the code under `flow_modules` and the changes will be automatically propagated to the `PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots`. + +We also specify the namespace of our flow module: `yeeef/UsefulChatBots`. yeeef is my HuggingFace username, and you should replace it with your own Hugging Face username. Note that this `url` could be arbitrary as it does not exist online yet, but we highly recommend that the namespace of the flow module be consistent with your HuggingFace username, such that publishing it later will be seamless.
+ +Then let’s execute the code and test our new flow: + +``` +(aiflows) ➜ dev-tutorial python ask_economic_expert_bot.py +inputs: [{'id': 0, 'query': 'What is CPI? What is the current CPI in the US?'}] +[2023-07-05 17:05:35,530][aiflows.base_flows.abstract][WARNING] - The raw response was not logged. +[{'id': 0, 'inference_outputs': [OutputMessage(message_id='d95683d6-9507-4a90-b290-6a43e609c904', created_at='2023-07-05 09:05:35.530972000', created_by='EconomicExpertBot', message_type='OutputMessage', data={'output_keys': ['response'], 'output_data': {'response': 'CPI, or the Consumer Price Index, is a measure that examines the weighted average of prices of a basket of consumer goods and services, such as transportation, food, and medical care. It is calculated by taking price changes for each item in the predetermined basket of goods and averaging them. Changes in the CPI are used to assess price changes associated with the cost of living.'}, 'missing_output_keys': []}, private_keys=['api_keys'])], 'error': None}] +``` + +Looks good! Now let’s publish it to the Hugging Face Hub! + +### Publishing Your Flow Module + +Start by creating a new model on Hugging Face, aligning it with the namespace used during testing: `yeeef/UsefulChatBots`. Click the `Create model` button to create the model. + +![](https://hackmd.io/_uploads/r1iB4pGFn.png) + +Then, you can either upload the files manually through the Hugging Face webpage or push your changes to the remote: + +```shell +(aiflows) ➜ dev-tutorial cd PATH_TO_LOCAL_DEV_DIRECTORY/dev_UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git remote add origin https://huggingface.co/yeeef/UsefulChatBots +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git pull -r origin main +(aiflows) ➜ dev_UsefulChatBots git:(main) ✗ git push --set-upstream origin main +``` + +Congratulations!
You now have your remote module online, available for everyone to use! + + +![](https://hackmd.io/_uploads/HJ4LNafF3.png) + +## Contributing to an Existing Flow + +In this tutorial, we continue to use the `trivial_sync_demo.py` (see [Flow Module Management](./flow_module_management.md)) script. As the dependencies are synced to your root directory, you can instantly modify the synced flow module according to your needs. Once you've made enough changes and feel ready to make a Pull Request (PR), you simply need to push your changes to the Hugging Face repository and create the PR. + +For instance, let's say we want to update the dependency of [nbaldwin/ChatInteractiveFlowModule](https://huggingface.co/nbaldwin/ChatInteractiveFlowModule/tree/main) to the latest version of [aiflows/ChatAtomicFlow](https://huggingface.co/aiflows/ChatFlowModule): + +```python +dependencies = [ + {"url": "aiflows/ChatFlowModule", "revision": "main"} # cae3fdf2f0ef7f28127cf4bc35ce985c5fc4d19a -> main +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow + +class ChatHumanFlowModule(ChatAtomicFlow): + def __init__(self, **kwargs): + + ##SOME CODE +``` + +Firstly, navigate to the synced folder, initialize a git repository, and commit your changes: + +``` +(aiflows) ➜ dev-tutorial cd flow_modules/nbaldwin/ChatInteractiveFlowModule +(aiflows) ➜ ChatInteractiveFlowModule git init +Initialized empty Git repository in /Users/yeeef/Desktop/dlab-ra/dev-tutorial/flow_modules/nbaldwin/ChatInteractiveFlowModule/.git/ +(aiflows) ➜ ChatInteractiveFlowModule git:(main) ✗ git add . +(aiflows) ➜ ChatInteractiveFlowModule git:(main) ✗ git commit -m "Change the dependency revision to main" +[main d7465df] Change the dependency revision to main + 1 file changed, 1 insertion(+), 1 deletion(-) +``` + +Next, you need to open a PR on the target Hugging Face repository. 
Navigate to `Community` and click on `New pull request`. + +![](https://hackmd.io/_uploads/ry0f4pfF2.png) + + +Enter a brief description for your PR branch and click on `Create PR branch`. + +![](https://hackmd.io/_uploads/S1aQV6fK3.png) + + +Once your PR branch has been created (for instance, `pr/2`), you'll need to push your changes to this branch: + +``` +(aiflows) ➜ ChatInteractiveFlowModule git:(main) git checkout -b pr/2 +Switched to a new branch 'pr/2' +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git remote add origin https://huggingface.co/nbaldwin/ChatInteractiveFlowModule +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git pull -r origin pr/2 +(aiflows) ➜ ChatInteractiveFlowModule git:(pr/2) git push origin pr/2:pr/2 +Enumerating objects: 11, done. +Counting objects: 100% (11/11), done. +Delta compression using up to 10 threads +Compressing objects: 100% (8/8), done. +Writing objects: 100% (8/8), 952 bytes | 952.00 KiB/s, done. +Total 8 (delta 5), reused 0 (delta 0), pack-reused + + 0 +To https://huggingface.co/nbaldwin/ChatInteractiveFlowModule + 1849a87..1818057 pr/2 -> refs/pr/2 +``` + +Finally, review your PR changes on the Hugging Face PR page and click the `Publish` button to finalize your submission. + +![](https://hackmd.io/_uploads/rkvVV6MFn.png) + +## Develop Over an Existing Flow and Publish it Under Your Namespace + +As a Flow developer, you can easily develop based on any synced flow modules. However, instead of making a PR to the original repository, you may wish to publish it under your own namespace. This can be the case if you've made substantial changes that the original author might not prefer. + +Let’s get back to our `trivial_sync_demo`, where we leverage `nbaldwin/ChatInteractiveFlowModule`. We have made some changes to it and want to publish it on our own as `yeeef/MyChatInteractiveFlowModule`. 
To do this, we recommend following steps: + +**Step 1**: Manually copy the modified flow module out of the `flow_modules` directory: + +```shell +(aiflows) ➜ dev-tutorial cp -r ./flow_modules/nbaldwin/ChatInteractiveFlowModules PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules +``` + +**Step 2**: Next, we can treat it as a local file directory and sync it with a local revision: + +```python +dependencies = [ + {"url": "nbaldwin/ChatInteractiveFlowModules", "revision": "main"}, + {"url": "yeeef/MyChatInteractiveFlowModule", "revision": "PATH_TO_LOCAL_DEV_DIRECTORY/MyChatInteractiveFlowModules"}, + +] +from aiflows import flow_verse +flow_verse.sync_dependencies(dependencies) + +from flow_modules.nbaldwin.ChatInteractiveFlowModules import ChatHumanFlowModule +from flow_modules.yeeef.MyChatInteractiveFlowModules import MyChatInteractiveFlowModules + +if __name__ == "__main__": + print("it is a trivial sync demo") +``` + +**Step 3**: Finally, follow the procedure outlined in [this](#creating-your-own-flow-module) section, and you are good to go! diff --git a/docs/getting_started/index.md b/docs/getting_started/index.md new file mode 100644 index 0000000..f9e68f7 --- /dev/null +++ b/docs/getting_started/index.md @@ -0,0 +1,23 @@ +## Getting Started + +### [Quick start (🕓 5 min)](./Quick_Start/quick_start.md) + +Here, you'll see how you can run inference with your first question-answering Flow, and you can trivially change between vastly different question-answering Flows thanks to the modular abstraction and FlowVerse! + +### [Tutorial (🕓 20 min)](./Tutorial/tutorial_landing_page.md) + +In this tutorial, we introduce you to the library's features through a walkthrough of how to build useful Flows of gradually increasing complexity. Starting from a vanilla QA Flow, we'll first extend it to a ReAct Flow, then ReAct with human feedback, and finish the tutorial with a version of AutoGPT! 
+ +### [Developer's Guide (🕓 10 min)](./developer_guide/developper_guide_landing_page.md) + +We are constantly optimizing our Flow development workflow (pun intended:). In this short guide, we share our best tips so that you don't have to learn the hard way. + +### [Detailed Examples](./detailed_examples/detailed_example_landing_page.md) +Many of the recently proposed prompting and collaboration strategies involving tools, humans, and AI models are, in essence, specific Flows (see the figure below). In the link above, you'll find a detailed walkthrough of how to build some representative workflows. + + + +![The Flows framework exemplified.](/media/previous_flows_rounded.png) +**The Flows framework exemplified.** The first column depicts examples of tools. Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools, constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between Atomic or Composite Flows. The fourth column illustrates a specific Composite competitive coding Flow as those used in the experiments in the [paper](https://arxiv.org/abs/2308.01285). The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + + diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..49eb8ec --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,74 @@ +Introduction +============= + +.. toctree:: + :maxdepth: 4 + :caption: Table of Contents + :hidden: + + introduction/index + installation/index + getting_started/index + contributing_info/contribute_index + citation/index + source/modules + +.. 
figure:: media/logo_text_statement_alt_rounded_corners.png + :align: center + :alt: image + :width: 600px + +🤖🌊 **aiFlows** embodies the `Flows`_ (`arXiv`_) abstraction and greatly simplifies the design and +implementation of complex (work)Flows involving humans, AI systems, and tools. It enables: + +- 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface +- 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows +- 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future + + +.. _Flows: https://github.com/epfl-dlab/aiflows/assets/flows_paper.pdf +.. _arXiv: https://arxiv.org/abs/2308.01285 + +Flows in a Nutshell +--------------------- + +The framework is centered around *Flows* and *messages*. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process. + +.. figure:: media/fig1_rounded_corners.png + :align: center + :alt: image + :width: 1000px + + The *Flows* framework exemplified. **The first column depicts examples of tools.** Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between *Atomic* or *Composite* Flows. The fourth column illustrates a specific *Composite* competitive coding Flow as those used in the experiments in the paper. 
The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + +FlowVerse in a Nutshell +---------------------------- + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file. As an example, `here `_ is the AutoGPT Flow on FlowVerse. For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++). + +The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord `channel `_. Additionally, the *Tutorials* and *Detailed Examples* in the `Getting Started `_ sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.). + +Why should I use aiFlows? +---------------------------- + +AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools. + +As a researcher, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design, implement, and study arbitrarily complex interactions +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.).
+- The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the `cc_flows` repository could be a useful starting point in future studies). +- The ability to readily make your work accessible to practitioners and other researchers and access their feedback. + +As a practitioner, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design and implement arbitrarily complex interactions. +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). +- The ability to readily reuse or build on top of Flows shared on the FlowVerse. +- Direct access to any advancements in the field. + +To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the `contribute <../contributing_info/index.rst>`_ section for more information. diff --git a/docs/installation/index.rst b/docs/installation/index.rst new file mode 100644 index 0000000..c21e5ff --- /dev/null +++ b/docs/installation/index.rst @@ -0,0 +1,19 @@ +Installation +=================== + +The library requires Python 3.10+. To install the library, run the following command: + +.. code-block:: shell + + pip install aiflows + +Other Installation Options +-------------------------- + +Install bleeding-edge version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: shell + + git clone git@github.com:epfl-dlab/aiflows.git + cd aiflows + pip install -e . diff --git a/docs/introduction/index.rst b/docs/introduction/index.rst new file mode 100644 index 0000000..f64d1aa --- /dev/null +++ b/docs/introduction/index.rst @@ -0,0 +1,60 @@ +Introduction +============= + +.. 
figure:: ../media/logo_text_statement_alt_rounded_corners.png + :align: center + :alt: image + :width: 600px + +🤖🌊 **aiFlows** embodies the `Flows`_ (`arXiv`_) abstraction and greatly simplifies the design and implementation of complex (work)Flows involving humans, AI systems, and tools. It enables: + +- 🧩 Modularity: Flows can be stacked like LEGO blocks into arbitrarily nested structures with the complexity hidden behind a message-based interface +- 🤝 Reusability: Flows can be shared publicly on the FlowVerse, readily downloaded and reused as part of different Flows +- 🔀 Concurrency: Being consistent with the Actor model of concurrent computation, Flows are concurrency friendly – a necessary feature for a multi-agent future + +.. _Flows: https://github.com/epfl-dlab/aiflows/assets/flows_paper.pdf +.. _arXiv: https://arxiv.org/abs/2308.01285 + +Flows in a Nutshell +--------------------- + +The framework is centered around *Flows* and *messages*. +Flows represent the fundamental building block of computation. They are independent, self-contained, goal-driven entities able to complete a semantically meaningful unit of work. +To exchange information, Flows communicate via a standardized message-based interface. Messages can be of any type the recipient Flow can process. + +.. figure:: ../media/fig1_rounded_corners.png + :align: center + :alt: image + :width: 1000px + + The *Flows* framework exemplified. **The first column depicts examples of tools.** Notably, in the Flows framework, AI systems correspond to tools. The second column depicts Atomic Flows, effectively minimal wrappers around tools constructed from the example tools. The third column depicts examples of Composite Flows defining structured interaction between *Atomic* or *Composite* Flows. The fourth column illustrates a specific *Composite* competitive coding Flow as those used in the experiments in the paper. 
The fifth column outlines the structure of a hypothetical Flow, defining a meta-reasoning process that could support autonomous behavior. + +FlowVerse in a Nutshell +---------------------------- + +The FlowVerse is a repository of Flows (powered by the 🤗 HuggingFace hub) created and shared by our community for everyone to use! With aiFlows, Flows can be readily downloaded, used, extended, or composed into novel, more complex Flows. For instance, sharing a Flow that uses only API-based tools (tools subsume models in the Flows abstraction) is as simple as sharing a config file (e.g., `here `_ is the AutoGPT Flow on FlowVerse). For the ones using ChatGPT, you could think of them as completely customizable open-source GPTs(++). + +The FlowVerse is continuously growing. To explore the currently available Flows, check out the FlowVerse Forum on the Discord `channel `_. Additionally, the *Tutorials* and *Detailed Examples* in the `Getting Started `_ sections cover some of the Flows we provide in more detail (e.g., the ChatAtomicFlow and QA, VisionAtomicFlow and VisualQA, ReAct and ReAct with human feedback, AutoGPT, etc.). + +Why should I use aiFlows? +---------------------------- + +AI is set to revolutionize the way we work. Our mission is to support AI researchers and to allow them to seamlessly share advancements with practitioners. This will establish a feedback loop, guiding progress toward beneficial directions while ensuring that everyone can freely access and benefit from the next-generation AI tools. + +As a researcher, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design, implement, and study arbitrarily complex interactions +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). 
+- The ability to readily reproduce, reuse, or build on top of Flows shared on the FlowVerse and systematically study them across different settings (the infrastructure in the `cc_flows` repository could be a useful starting point in future studies). +- The ability to readily make your work accessible to practitioners and other researchers and access their feedback. + +As a practitioner, you will benefit from: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- The ability to design and implement arbitrarily complex interactions. +- Complete control and customizability (e.g., the tools, the specific Flows and the information they have access to, the choice of models and their deployment, etc.). +- The ability to readily reuse or build on top of Flows shared on the FlowVerse. +- Direct access to any advancements in the field. + +To develop the next-generation AI tools and simultaneously maximize the benefits, developers and researchers need to have complete control over their workflows. aiFlows strives to empower you to make each Flow your own! See the `contribute <../contributing_info/index.rst>`_ section for more information. diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..954237b --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/media/contribute_flow_1.png b/docs/media/contribute_flow_1.png new file mode 100644 index 0000000..a0b4d61 Binary files /dev/null and b/docs/media/contribute_flow_1.png differ diff --git a/docs/media/contribute_flow_2.png b/docs/media/contribute_flow_2.png new file mode 100644 index 0000000..30833bd Binary files /dev/null and b/docs/media/contribute_flow_2.png differ diff --git a/docs/media/contribute_flow_3.png b/docs/media/contribute_flow_3.png new file mode 100644 index 0000000..317d2db Binary files /dev/null and b/docs/media/contribute_flow_3.png differ diff --git a/docs/media/fig1_rounded_corners.png b/docs/media/fig1_rounded_corners.png new file mode 100644 index 0000000..177c3e3 Binary files /dev/null and b/docs/media/fig1_rounded_corners.png differ diff --git a/docs/media/logo_text_statement_alt_rounded_corners.png b/docs/media/logo_text_statement_alt_rounded_corners.png new file mode 100644 index 0000000..ef0009c Binary files /dev/null and b/docs/media/logo_text_statement_alt_rounded_corners.png differ diff --git a/docs/media/previous_flows_rounded.png b/docs/media/previous_flows_rounded.png new file mode 100644 index 0000000..a9db02c Binary files /dev/null and b/docs/media/previous_flows_rounded.png differ diff --git a/docs/media/publish_flow_1.png b/docs/media/publish_flow_1.png new file mode 100644 index 0000000..294a5e5 Binary files /dev/null and b/docs/media/publish_flow_1.png differ diff --git a/docs/media/publish_flow_2.png b/docs/media/publish_flow_2.png new file mode 100644 index 0000000..178b3c2 Binary files /dev/null and b/docs/media/publish_flow_2.png differ diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 
index 0000000..8a8ff48 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +sphinx-book-theme==1.0.1 diff --git a/docs/source/aiflows.backends.rst b/docs/source/aiflows.backends.rst new file mode 100644 index 0000000..aeb42aa --- /dev/null +++ b/docs/source/aiflows.backends.rst @@ -0,0 +1,29 @@ +aiflows.backends package +======================== + +Submodules +---------- + +aiflows.backends.api\_info module +--------------------------------- + +.. automodule:: aiflows.backends.api_info + :members: + :undoc-members: + :show-inheritance: + +aiflows.backends.llm\_lite module +--------------------------------- + +.. automodule:: aiflows.backends.llm_lite + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.backends + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.base_flows.rst b/docs/source/aiflows.base_flows.rst new file mode 100644 index 0000000..2fb8b8c --- /dev/null +++ b/docs/source/aiflows.base_flows.rst @@ -0,0 +1,61 @@ +aiflows.base\_flows package +=========================== + +Submodules +---------- + +aiflows.base\_flows.abstract module +----------------------------------- + +.. automodule:: aiflows.base_flows.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.atomic module +--------------------------------- + +.. automodule:: aiflows.base_flows.atomic + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.branching module +------------------------------------ + +.. automodule:: aiflows.base_flows.branching + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.circular module +----------------------------------- + +.. automodule:: aiflows.base_flows.circular + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.composite module +------------------------------------ + +.. 
automodule:: aiflows.base_flows.composite + :members: + :undoc-members: + :show-inheritance: + +aiflows.base\_flows.sequential module +------------------------------------- + +.. automodule:: aiflows.base_flows.sequential + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.base_flows + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.data_transformations.rst b/docs/source/aiflows.data_transformations.rst new file mode 100644 index 0000000..a9c3906 --- /dev/null +++ b/docs/source/aiflows.data_transformations.rst @@ -0,0 +1,109 @@ +aiflows.data\_transformations package +===================================== + +Submodules +---------- + +aiflows.data\_transformations.abstract module +--------------------------------------------- + +.. automodule:: aiflows.data_transformations.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.end\_of\_interaction module +--------------------------------------------------------- + +.. automodule:: aiflows.data_transformations.end_of_interaction + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.json module +----------------------------------------- + +.. automodule:: aiflows.data_transformations.json + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_copy module +---------------------------------------------- + +.. automodule:: aiflows.data_transformations.key_copy + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_delete module +------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_delete + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_match\_input module +------------------------------------------------------ + +.. 
automodule:: aiflows.data_transformations.key_match_input + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_rename module +------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_rename + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_select module +------------------------------------------------ + +.. automodule:: aiflows.data_transformations.key_select + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.key\_set module +--------------------------------------------- + +.. automodule:: aiflows.data_transformations.key_set + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.print\_previous\_messages module +-------------------------------------------------------------- + +.. automodule:: aiflows.data_transformations.print_previous_messages + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.regex\_extractor\_first module +------------------------------------------------------------ + +.. automodule:: aiflows.data_transformations.regex_extractor_first + :members: + :undoc-members: + :show-inheritance: + +aiflows.data\_transformations.unnesting\_dict module +---------------------------------------------------- + +.. automodule:: aiflows.data_transformations.unnesting_dict + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.data_transformations + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.datasets.rst b/docs/source/aiflows.datasets.rst new file mode 100644 index 0000000..9639658 --- /dev/null +++ b/docs/source/aiflows.datasets.rst @@ -0,0 +1,37 @@ +aiflows.datasets package +======================== + +Submodules +---------- + +aiflows.datasets.abstract module +-------------------------------- + +.. 
automodule:: aiflows.datasets.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.datasets.demonstrations\_11 module +------------------------------------------ + +.. automodule:: aiflows.datasets.demonstrations_11 + :members: + :undoc-members: + :show-inheritance: + +aiflows.datasets.outputs module +------------------------------- + +.. automodule:: aiflows.datasets.outputs + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.datasets + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.flow_cache.rst b/docs/source/aiflows.flow_cache.rst new file mode 100644 index 0000000..07e99c9 --- /dev/null +++ b/docs/source/aiflows.flow_cache.rst @@ -0,0 +1,21 @@ +aiflows.flow\_cache package +=========================== + +Submodules +---------- + +aiflows.flow\_cache.flow\_cache module +-------------------------------------- + +.. automodule:: aiflows.flow_cache.flow_cache + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.flow_cache + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.flow_launchers.rst b/docs/source/aiflows.flow_launchers.rst new file mode 100644 index 0000000..2e924b2 --- /dev/null +++ b/docs/source/aiflows.flow_launchers.rst @@ -0,0 +1,29 @@ +aiflows.flow\_launchers package +=============================== + +Submodules +---------- + +aiflows.flow\_launchers.abstract module +--------------------------------------- + +.. automodule:: aiflows.flow_launchers.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.flow\_launchers.flow\_API\_launcher module +-------------------------------------------------- + +.. automodule:: aiflows.flow_launchers.flow_API_launcher + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: aiflows.flow_launchers + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.flow_verse.rst b/docs/source/aiflows.flow_verse.rst new file mode 100644 index 0000000..c9c024b --- /dev/null +++ b/docs/source/aiflows.flow_verse.rst @@ -0,0 +1,29 @@ +aiflows.flow\_verse package +=========================== + +Submodules +---------- + +aiflows.flow\_verse.loading module +---------------------------------- + +.. automodule:: aiflows.flow_verse.loading + :members: + :undoc-members: + :show-inheritance: + +aiflows.flow\_verse.utils module +-------------------------------- + +.. automodule:: aiflows.flow_verse.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.flow_verse + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.history.rst b/docs/source/aiflows.history.rst new file mode 100644 index 0000000..e47e4a4 --- /dev/null +++ b/docs/source/aiflows.history.rst @@ -0,0 +1,21 @@ +aiflows.history package +======================= + +Submodules +---------- + +aiflows.history.flow\_history module +------------------------------------ + +.. automodule:: aiflows.history.flow_history + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.history + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.interfaces.rst b/docs/source/aiflows.interfaces.rst new file mode 100644 index 0000000..555e56c --- /dev/null +++ b/docs/source/aiflows.interfaces.rst @@ -0,0 +1,29 @@ +aiflows.interfaces package +========================== + +Submodules +---------- + +aiflows.interfaces.abstract module +---------------------------------- + +.. automodule:: aiflows.interfaces.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.interfaces.key\_interface module +---------------------------------------- + +.. 
automodule:: aiflows.interfaces.key_interface + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.interfaces + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.messages.rst b/docs/source/aiflows.messages.rst new file mode 100644 index 0000000..a7b2044 --- /dev/null +++ b/docs/source/aiflows.messages.rst @@ -0,0 +1,29 @@ +aiflows.messages package +======================== + +Submodules +---------- + +aiflows.messages.abstract module +-------------------------------- + +.. automodule:: aiflows.messages.abstract + :members: + :undoc-members: + :show-inheritance: + +aiflows.messages.flow\_message module +------------------------------------- + +.. automodule:: aiflows.messages.flow_message + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.messages + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.prompt_template.rst b/docs/source/aiflows.prompt_template.rst new file mode 100644 index 0000000..261ef48 --- /dev/null +++ b/docs/source/aiflows.prompt_template.rst @@ -0,0 +1,21 @@ +aiflows.prompt\_template package +================================ + +Submodules +---------- + +aiflows.prompt\_template.jinja2\_prompts module +----------------------------------------------- + +.. automodule:: aiflows.prompt_template.jinja2_prompts + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.prompt_template + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.rst b/docs/source/aiflows.rst new file mode 100644 index 0000000..4ef6939 --- /dev/null +++ b/docs/source/aiflows.rst @@ -0,0 +1,29 @@ +aiflows package +=============== + +Subpackages +----------- + +.. 
toctree:: + :maxdepth: 4 + + aiflows.backends + aiflows.base_flows + aiflows.data_transformations + aiflows.datasets + aiflows.flow_cache + aiflows.flow_launchers + aiflows.flow_verse + aiflows.history + aiflows.interfaces + aiflows.messages + aiflows.prompt_template + aiflows.utils + +Module contents +--------------- + +.. automodule:: aiflows + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/aiflows.utils.rst b/docs/source/aiflows.utils.rst new file mode 100644 index 0000000..fd4af16 --- /dev/null +++ b/docs/source/aiflows.utils.rst @@ -0,0 +1,45 @@ +aiflows.utils package +===================== + +Submodules +---------- + +aiflows.utils.general\_helpers module +------------------------------------- + +.. automodule:: aiflows.utils.general_helpers + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.io\_utils module +------------------------------ + +.. automodule:: aiflows.utils.io_utils + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.logging module +---------------------------- + +.. automodule:: aiflows.utils.logging + :members: + :undoc-members: + :show-inheritance: + +aiflows.utils.rich\_utils module +-------------------------------- + +.. automodule:: aiflows.utils.rich_utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: aiflows.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/modules.rst b/docs/source/modules.rst new file mode 100644 index 0000000..f823ffb --- /dev/null +++ b/docs/source/modules.rst @@ -0,0 +1,7 @@ +aiflows +======= + +.. 
toctree:: + :maxdepth: 4 + + aiflows diff --git a/fonts/fonts/glyphicons-halflings-regular.eot b/fonts/fonts/glyphicons-halflings-regular.eot new file mode 100644 index 0000000..b93a495 Binary files /dev/null and b/fonts/fonts/glyphicons-halflings-regular.eot differ diff --git a/fonts/fonts/glyphicons-halflings-regular.svg b/fonts/fonts/glyphicons-halflings-regular.svg new file mode 100644 index 0000000..187805a --- /dev/null +++ b/fonts/fonts/glyphicons-halflings-regular.svg @@ -0,0 +1,288 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/fonts/fonts/glyphicons-halflings-regular.ttf b/fonts/fonts/glyphicons-halflings-regular.ttf new file mode 100644 index 0000000..1413fc6 Binary files /dev/null and b/fonts/fonts/glyphicons-halflings-regular.ttf differ diff --git a/fonts/fonts/glyphicons-halflings-regular.woff b/fonts/fonts/glyphicons-halflings-regular.woff new file mode 100644 index 0000000..9e61285 Binary files /dev/null and b/fonts/fonts/glyphicons-halflings-regular.woff differ diff --git a/fonts/fonts/glyphicons-halflings-regular.woff2 b/fonts/fonts/glyphicons-halflings-regular.woff2 new file mode 100644 index 0000000..64539b5 Binary files /dev/null and b/fonts/fonts/glyphicons-halflings-regular.woff2 differ diff --git a/img/intro-bg.jpg b/img/intro-bg.jpg new file mode 100644 index 0000000..a8e8e74 Binary files /dev/null and b/img/intro-bg.jpg differ diff --git a/index.html b/index.html new file mode 100644 index 
0000000..65d6892 --- /dev/null +++ b/index.html @@ -0,0 +1 @@ +aiFlows
\ No newline at end of file diff --git a/js/bootstrap.js b/js/bootstrap.js new file mode 100644 index 0000000..8a2e99a --- /dev/null +++ b/js/bootstrap.js @@ -0,0 +1,2377 @@ +/*! + * Bootstrap v3.3.7 (http://getbootstrap.com) + * Copyright 2011-2016 Twitter, Inc. + * Licensed under the MIT license + */ + +if (typeof jQuery === 'undefined') { + throw new Error('Bootstrap\'s JavaScript requires jQuery') +} + ++function ($) { + 'use strict'; + var version = $.fn.jquery.split(' ')[0].split('.') + if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) { + throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4') + } +}(jQuery); + +/* ======================================================================== + * Bootstrap: transition.js v3.3.7 + * http://getbootstrap.com/javascript/#transitions + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/) + // ============================================================ + + function transitionEnd() { + var el = document.createElement('bootstrap') + + var transEndEventNames = { + WebkitTransition : 'webkitTransitionEnd', + MozTransition : 'transitionend', + OTransition : 'oTransitionEnd otransitionend', + transition : 'transitionend' + } + + for (var name in transEndEventNames) { + if (el.style[name] !== undefined) { + return { end: transEndEventNames[name] } + } + } + + return false // explicit for ie8 ( ._.) 
+ } + + // http://blog.alexmaccaw.com/css-transitions + $.fn.emulateTransitionEnd = function (duration) { + var called = false + var $el = this + $(this).one('bsTransitionEnd', function () { called = true }) + var callback = function () { if (!called) $($el).trigger($.support.transition.end) } + setTimeout(callback, duration) + return this + } + + $(function () { + $.support.transition = transitionEnd() + + if (!$.support.transition) return + + $.event.special.bsTransitionEnd = { + bindType: $.support.transition.end, + delegateType: $.support.transition.end, + handle: function (e) { + if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments) + } + } + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: alert.js v3.3.7 + * http://getbootstrap.com/javascript/#alerts + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // ALERT CLASS DEFINITION + // ====================== + + var dismiss = '[data-dismiss="alert"]' + var Alert = function (el) { + $(el).on('click', dismiss, this.close) + } + + Alert.VERSION = '3.3.7' + + Alert.TRANSITION_DURATION = 150 + + Alert.prototype.close = function (e) { + var $this = $(this) + var selector = $this.attr('data-target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + var $parent = $(selector === '#' ? 
[] : selector) + + if (e) e.preventDefault() + + if (!$parent.length) { + $parent = $this.closest('.alert') + } + + $parent.trigger(e = $.Event('close.bs.alert')) + + if (e.isDefaultPrevented()) return + + $parent.removeClass('in') + + function removeElement() { + // detach from parent, fire event then clean up data + $parent.detach().trigger('closed.bs.alert').remove() + } + + $.support.transition && $parent.hasClass('fade') ? + $parent + .one('bsTransitionEnd', removeElement) + .emulateTransitionEnd(Alert.TRANSITION_DURATION) : + removeElement() + } + + + // ALERT PLUGIN DEFINITION + // ======================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.alert') + + if (!data) $this.data('bs.alert', (data = new Alert(this))) + if (typeof option == 'string') data[option].call($this) + }) + } + + var old = $.fn.alert + + $.fn.alert = Plugin + $.fn.alert.Constructor = Alert + + + // ALERT NO CONFLICT + // ================= + + $.fn.alert.noConflict = function () { + $.fn.alert = old + return this + } + + + // ALERT DATA-API + // ============== + + $(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: button.js v3.3.7 + * http://getbootstrap.com/javascript/#buttons + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // BUTTON PUBLIC CLASS DEFINITION + // ============================== + + var Button = function (element, options) { + this.$element = $(element) + this.options = $.extend({}, Button.DEFAULTS, options) + this.isLoading = false + } + + Button.VERSION = '3.3.7' + + Button.DEFAULTS = { + loadingText: 'loading...' + } + + Button.prototype.setState = function (state) { + var d = 'disabled' + var $el = this.$element + var val = $el.is('input') ? 'val' : 'html' + var data = $el.data() + + state += 'Text' + + if (data.resetText == null) $el.data('resetText', $el[val]()) + + // push to event loop to allow forms to submit + setTimeout($.proxy(function () { + $el[val](data[state] == null ? this.options[state] : data[state]) + + if (state == 'loadingText') { + this.isLoading = true + $el.addClass(d).attr(d, d).prop(d, true) + } else if (this.isLoading) { + this.isLoading = false + $el.removeClass(d).removeAttr(d).prop(d, false) + } + }, this), 0) + } + + Button.prototype.toggle = function () { + var changed = true + var $parent = this.$element.closest('[data-toggle="buttons"]') + + if ($parent.length) { + var $input = this.$element.find('input') + if ($input.prop('type') == 'radio') { + if ($input.prop('checked')) changed = false + $parent.find('.active').removeClass('active') + this.$element.addClass('active') + } else if ($input.prop('type') == 'checkbox') { + if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false + this.$element.toggleClass('active') + } + $input.prop('checked', this.$element.hasClass('active')) + if (changed) $input.trigger('change') + } else { + this.$element.attr('aria-pressed', !this.$element.hasClass('active')) + this.$element.toggleClass('active') + } + } + + + // BUTTON PLUGIN DEFINITION + // ======================== + + 
function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.button') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.button', (data = new Button(this, options))) + + if (option == 'toggle') data.toggle() + else if (option) data.setState(option) + }) + } + + var old = $.fn.button + + $.fn.button = Plugin + $.fn.button.Constructor = Button + + + // BUTTON NO CONFLICT + // ================== + + $.fn.button.noConflict = function () { + $.fn.button = old + return this + } + + + // BUTTON DATA-API + // =============== + + $(document) + .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { + var $btn = $(e.target).closest('.btn') + Plugin.call($btn, 'toggle') + if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) { + // Prevent double click on radios, and the double selections (so cancellation) on checkboxes + e.preventDefault() + // The target component still receive the focus + if ($btn.is('input,button')) $btn.trigger('focus') + else $btn.find('input:visible,button:visible').first().trigger('focus') + } + }) + .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { + $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: carousel.js v3.3.7 + * http://getbootstrap.com/javascript/#carousel + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // CAROUSEL CLASS DEFINITION + // ========================= + + var Carousel = function (element, options) { + this.$element = $(element) + this.$indicators = this.$element.find('.carousel-indicators') + this.options = options + this.paused = null + this.sliding = null + this.interval = null + this.$active = null + this.$items = null + + this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this)) + + this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element + .on('mouseenter.bs.carousel', $.proxy(this.pause, this)) + .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) + } + + Carousel.VERSION = '3.3.7' + + Carousel.TRANSITION_DURATION = 600 + + Carousel.DEFAULTS = { + interval: 5000, + pause: 'hover', + wrap: true, + keyboard: true + } + + Carousel.prototype.keydown = function (e) { + if (/input|textarea/i.test(e.target.tagName)) return + switch (e.which) { + case 37: this.prev(); break + case 39: this.next(); break + default: return + } + + e.preventDefault() + } + + Carousel.prototype.cycle = function (e) { + e || (this.paused = false) + + this.interval && clearInterval(this.interval) + + this.options.interval + && !this.paused + && (this.interval = setInterval($.proxy(this.next, this), this.options.interval)) + + return this + } + + Carousel.prototype.getItemIndex = function (item) { + this.$items = item.parent().children('.item') + return this.$items.index(item || this.$active) + } + + Carousel.prototype.getItemForDirection = function (direction, active) { + var activeIndex = this.getItemIndex(active) + var willWrap = (direction == 'prev' && activeIndex === 0) + || (direction == 'next' && activeIndex == (this.$items.length - 1)) + if (willWrap && !this.options.wrap) return active + var 
delta = direction == 'prev' ? -1 : 1 + var itemIndex = (activeIndex + delta) % this.$items.length + return this.$items.eq(itemIndex) + } + + Carousel.prototype.to = function (pos) { + var that = this + var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active')) + + if (pos > (this.$items.length - 1) || pos < 0) return + + if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid" + if (activeIndex == pos) return this.pause().cycle() + + return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos)) + } + + Carousel.prototype.pause = function (e) { + e || (this.paused = true) + + if (this.$element.find('.next, .prev').length && $.support.transition) { + this.$element.trigger($.support.transition.end) + this.cycle(true) + } + + this.interval = clearInterval(this.interval) + + return this + } + + Carousel.prototype.next = function () { + if (this.sliding) return + return this.slide('next') + } + + Carousel.prototype.prev = function () { + if (this.sliding) return + return this.slide('prev') + } + + Carousel.prototype.slide = function (type, next) { + var $active = this.$element.find('.item.active') + var $next = next || this.getItemForDirection(type, $active) + var isCycling = this.interval + var direction = type == 'next' ? 
'left' : 'right' + var that = this + + if ($next.hasClass('active')) return (this.sliding = false) + + var relatedTarget = $next[0] + var slideEvent = $.Event('slide.bs.carousel', { + relatedTarget: relatedTarget, + direction: direction + }) + this.$element.trigger(slideEvent) + if (slideEvent.isDefaultPrevented()) return + + this.sliding = true + + isCycling && this.pause() + + if (this.$indicators.length) { + this.$indicators.find('.active').removeClass('active') + var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)]) + $nextIndicator && $nextIndicator.addClass('active') + } + + var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" + if ($.support.transition && this.$element.hasClass('slide')) { + $next.addClass(type) + $next[0].offsetWidth // force reflow + $active.addClass(direction) + $next.addClass(direction) + $active + .one('bsTransitionEnd', function () { + $next.removeClass([type, direction].join(' ')).addClass('active') + $active.removeClass(['active', direction].join(' ')) + that.sliding = false + setTimeout(function () { + that.$element.trigger(slidEvent) + }, 0) + }) + .emulateTransitionEnd(Carousel.TRANSITION_DURATION) + } else { + $active.removeClass('active') + $next.addClass('active') + this.sliding = false + this.$element.trigger(slidEvent) + } + + isCycling && this.cycle() + + return this + } + + + // CAROUSEL PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.carousel') + var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option) + var action = typeof option == 'string' ? 
option : options.slide + + if (!data) $this.data('bs.carousel', (data = new Carousel(this, options))) + if (typeof option == 'number') data.to(option) + else if (action) data[action]() + else if (options.interval) data.pause().cycle() + }) + } + + var old = $.fn.carousel + + $.fn.carousel = Plugin + $.fn.carousel.Constructor = Carousel + + + // CAROUSEL NO CONFLICT + // ==================== + + $.fn.carousel.noConflict = function () { + $.fn.carousel = old + return this + } + + + // CAROUSEL DATA-API + // ================= + + var clickHandler = function (e) { + var href + var $this = $(this) + var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 + if (!$target.hasClass('carousel')) return + var options = $.extend({}, $target.data(), $this.data()) + var slideIndex = $this.attr('data-slide-to') + if (slideIndex) options.interval = false + + Plugin.call($target, options) + + if (slideIndex) { + $target.data('bs.carousel').to(slideIndex) + } + + e.preventDefault() + } + + $(document) + .on('click.bs.carousel.data-api', '[data-slide]', clickHandler) + .on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler) + + $(window).on('load', function () { + $('[data-ride="carousel"]').each(function () { + var $carousel = $(this) + Plugin.call($carousel, $carousel.data()) + }) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: collapse.js v3.3.7 + * http://getbootstrap.com/javascript/#collapse + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + +/* jshint latedef: false */ + ++function ($) { + 'use strict'; + + // COLLAPSE PUBLIC CLASS DEFINITION + // ================================ + + var Collapse = function (element, options) { + this.$element = $(element) + this.options = $.extend({}, Collapse.DEFAULTS, options) + this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' + + '[data-toggle="collapse"][data-target="#' + element.id + '"]') + this.transitioning = null + + if (this.options.parent) { + this.$parent = this.getParent() + } else { + this.addAriaAndCollapsedClass(this.$element, this.$trigger) + } + + if (this.options.toggle) this.toggle() + } + + Collapse.VERSION = '3.3.7' + + Collapse.TRANSITION_DURATION = 350 + + Collapse.DEFAULTS = { + toggle: true + } + + Collapse.prototype.dimension = function () { + var hasWidth = this.$element.hasClass('width') + return hasWidth ? 
'width' : 'height' + } + + Collapse.prototype.show = function () { + if (this.transitioning || this.$element.hasClass('in')) return + + var activesData + var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing') + + if (actives && actives.length) { + activesData = actives.data('bs.collapse') + if (activesData && activesData.transitioning) return + } + + var startEvent = $.Event('show.bs.collapse') + this.$element.trigger(startEvent) + if (startEvent.isDefaultPrevented()) return + + if (actives && actives.length) { + Plugin.call(actives, 'hide') + activesData || actives.data('bs.collapse', null) + } + + var dimension = this.dimension() + + this.$element + .removeClass('collapse') + .addClass('collapsing')[dimension](0) + .attr('aria-expanded', true) + + this.$trigger + .removeClass('collapsed') + .attr('aria-expanded', true) + + this.transitioning = 1 + + var complete = function () { + this.$element + .removeClass('collapsing') + .addClass('collapse in')[dimension]('') + this.transitioning = 0 + this.$element + .trigger('shown.bs.collapse') + } + + if (!$.support.transition) return complete.call(this) + + var scrollSize = $.camelCase(['scroll', dimension].join('-')) + + this.$element + .one('bsTransitionEnd', $.proxy(complete, this)) + .emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize]) + } + + Collapse.prototype.hide = function () { + if (this.transitioning || !this.$element.hasClass('in')) return + + var startEvent = $.Event('hide.bs.collapse') + this.$element.trigger(startEvent) + if (startEvent.isDefaultPrevented()) return + + var dimension = this.dimension() + + this.$element[dimension](this.$element[dimension]())[0].offsetHeight + + this.$element + .addClass('collapsing') + .removeClass('collapse in') + .attr('aria-expanded', false) + + this.$trigger + .addClass('collapsed') + .attr('aria-expanded', false) + + this.transitioning = 1 + + var complete = function () { + this.transitioning = 
0 + this.$element + .removeClass('collapsing') + .addClass('collapse') + .trigger('hidden.bs.collapse') + } + + if (!$.support.transition) return complete.call(this) + + this.$element + [dimension](0) + .one('bsTransitionEnd', $.proxy(complete, this)) + .emulateTransitionEnd(Collapse.TRANSITION_DURATION) + } + + Collapse.prototype.toggle = function () { + this[this.$element.hasClass('in') ? 'hide' : 'show']() + } + + Collapse.prototype.getParent = function () { + return $(this.options.parent) + .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') + .each($.proxy(function (i, element) { + var $element = $(element) + this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element) + }, this)) + .end() + } + + Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) { + var isOpen = $element.hasClass('in') + + $element.attr('aria-expanded', isOpen) + $trigger + .toggleClass('collapsed', !isOpen) + .attr('aria-expanded', isOpen) + } + + function getTargetFromTrigger($trigger) { + var href + var target = $trigger.attr('data-target') + || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 + + return $(target) + } + + + // COLLAPSE PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.collapse') + var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option) + + if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false + if (!data) $this.data('bs.collapse', (data = new Collapse(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.collapse + + $.fn.collapse = Plugin + $.fn.collapse.Constructor = Collapse + + + // COLLAPSE NO CONFLICT + // ==================== + + $.fn.collapse.noConflict = function () { + $.fn.collapse = old + return this + } + + + // COLLAPSE DATA-API + // 
================= + + $(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) { + var $this = $(this) + + if (!$this.attr('data-target')) e.preventDefault() + + var $target = getTargetFromTrigger($this) + var data = $target.data('bs.collapse') + var option = data ? 'toggle' : $this.data() + + Plugin.call($target, option) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: dropdown.js v3.3.7 + * http://getbootstrap.com/javascript/#dropdowns + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // DROPDOWN CLASS DEFINITION + // ========================= + + var backdrop = '.dropdown-backdrop' + var toggle = '[data-toggle="dropdown"]' + var Dropdown = function (element) { + $(element).on('click.bs.dropdown', this.toggle) + } + + Dropdown.VERSION = '3.3.7' + + function getParent($this) { + var selector = $this.attr('data-target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + var $parent = selector && $(selector) + + return $parent && $parent.length ? 
$parent : $this.parent() + } + + function clearMenus(e) { + if (e && e.which === 3) return + $(backdrop).remove() + $(toggle).each(function () { + var $this = $(this) + var $parent = getParent($this) + var relatedTarget = { relatedTarget: this } + + if (!$parent.hasClass('open')) return + + if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return + + $parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget)) + + if (e.isDefaultPrevented()) return + + $this.attr('aria-expanded', 'false') + $parent.removeClass('open').trigger($.Event('hidden.bs.dropdown', relatedTarget)) + }) + } + + Dropdown.prototype.toggle = function (e) { + var $this = $(this) + + if ($this.is('.disabled, :disabled')) return + + var $parent = getParent($this) + var isActive = $parent.hasClass('open') + + clearMenus() + + if (!isActive) { + if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) { + // if mobile we use a backdrop because click events don't delegate + $(document.createElement('div')) + .addClass('dropdown-backdrop') + .insertAfter($(this)) + .on('click', clearMenus) + } + + var relatedTarget = { relatedTarget: this } + $parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget)) + + if (e.isDefaultPrevented()) return + + $this + .trigger('focus') + .attr('aria-expanded', 'true') + + $parent + .toggleClass('open') + .trigger($.Event('shown.bs.dropdown', relatedTarget)) + } + + return false + } + + Dropdown.prototype.keydown = function (e) { + if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return + + var $this = $(this) + + e.preventDefault() + e.stopPropagation() + + if ($this.is('.disabled, :disabled')) return + + var $parent = getParent($this) + var isActive = $parent.hasClass('open') + + if (!isActive && e.which != 27 || isActive && e.which == 27) { + if (e.which == 27) $parent.find(toggle).trigger('focus') + return $this.trigger('click') + } + + 
var desc = ' li:not(.disabled):visible a' + var $items = $parent.find('.dropdown-menu' + desc) + + if (!$items.length) return + + var index = $items.index(e.target) + + if (e.which == 38 && index > 0) index-- // up + if (e.which == 40 && index < $items.length - 1) index++ // down + if (!~index) index = 0 + + $items.eq(index).trigger('focus') + } + + + // DROPDOWN PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.dropdown') + + if (!data) $this.data('bs.dropdown', (data = new Dropdown(this))) + if (typeof option == 'string') data[option].call($this) + }) + } + + var old = $.fn.dropdown + + $.fn.dropdown = Plugin + $.fn.dropdown.Constructor = Dropdown + + + // DROPDOWN NO CONFLICT + // ==================== + + $.fn.dropdown.noConflict = function () { + $.fn.dropdown = old + return this + } + + + // APPLY TO STANDARD DROPDOWN ELEMENTS + // =================================== + + $(document) + .on('click.bs.dropdown.data-api', clearMenus) + .on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() }) + .on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle) + .on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown) + .on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: modal.js v3.3.7 + * http://getbootstrap.com/javascript/#modals + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // MODAL CLASS DEFINITION + // ====================== + + var Modal = function (element, options) { + this.options = options + this.$body = $(document.body) + this.$element = $(element) + this.$dialog = this.$element.find('.modal-dialog') + this.$backdrop = null + this.isShown = null + this.originalBodyPad = null + this.scrollbarWidth = 0 + this.ignoreBackdropClick = false + + if (this.options.remote) { + this.$element + .find('.modal-content') + .load(this.options.remote, $.proxy(function () { + this.$element.trigger('loaded.bs.modal') + }, this)) + } + } + + Modal.VERSION = '3.3.7' + + Modal.TRANSITION_DURATION = 300 + Modal.BACKDROP_TRANSITION_DURATION = 150 + + Modal.DEFAULTS = { + backdrop: true, + keyboard: true, + show: true + } + + Modal.prototype.toggle = function (_relatedTarget) { + return this.isShown ? 
this.hide() : this.show(_relatedTarget) + } + + Modal.prototype.show = function (_relatedTarget) { + var that = this + var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) + + this.$element.trigger(e) + + if (this.isShown || e.isDefaultPrevented()) return + + this.isShown = true + + this.checkScrollbar() + this.setScrollbar() + this.$body.addClass('modal-open') + + this.escape() + this.resize() + + this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this)) + + this.$dialog.on('mousedown.dismiss.bs.modal', function () { + that.$element.one('mouseup.dismiss.bs.modal', function (e) { + if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true + }) + }) + + this.backdrop(function () { + var transition = $.support.transition && that.$element.hasClass('fade') + + if (!that.$element.parent().length) { + that.$element.appendTo(that.$body) // don't move modals dom position + } + + that.$element + .show() + .scrollTop(0) + + that.adjustDialog() + + if (transition) { + that.$element[0].offsetWidth // force reflow + } + + that.$element.addClass('in') + + that.enforceFocus() + + var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget }) + + transition ? + that.$dialog // wait for modal to slide in + .one('bsTransitionEnd', function () { + that.$element.trigger('focus').trigger(e) + }) + .emulateTransitionEnd(Modal.TRANSITION_DURATION) : + that.$element.trigger('focus').trigger(e) + }) + } + + Modal.prototype.hide = function (e) { + if (e) e.preventDefault() + + e = $.Event('hide.bs.modal') + + this.$element.trigger(e) + + if (!this.isShown || e.isDefaultPrevented()) return + + this.isShown = false + + this.escape() + this.resize() + + $(document).off('focusin.bs.modal') + + this.$element + .removeClass('in') + .off('click.dismiss.bs.modal') + .off('mouseup.dismiss.bs.modal') + + this.$dialog.off('mousedown.dismiss.bs.modal') + + $.support.transition && this.$element.hasClass('fade') ? 
+ this.$element + .one('bsTransitionEnd', $.proxy(this.hideModal, this)) + .emulateTransitionEnd(Modal.TRANSITION_DURATION) : + this.hideModal() + } + + Modal.prototype.enforceFocus = function () { + $(document) + .off('focusin.bs.modal') // guard against infinite focus loop + .on('focusin.bs.modal', $.proxy(function (e) { + if (document !== e.target && + this.$element[0] !== e.target && + !this.$element.has(e.target).length) { + this.$element.trigger('focus') + } + }, this)) + } + + Modal.prototype.escape = function () { + if (this.isShown && this.options.keyboard) { + this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) { + e.which == 27 && this.hide() + }, this)) + } else if (!this.isShown) { + this.$element.off('keydown.dismiss.bs.modal') + } + } + + Modal.prototype.resize = function () { + if (this.isShown) { + $(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this)) + } else { + $(window).off('resize.bs.modal') + } + } + + Modal.prototype.hideModal = function () { + var that = this + this.$element.hide() + this.backdrop(function () { + that.$body.removeClass('modal-open') + that.resetAdjustments() + that.resetScrollbar() + that.$element.trigger('hidden.bs.modal') + }) + } + + Modal.prototype.removeBackdrop = function () { + this.$backdrop && this.$backdrop.remove() + this.$backdrop = null + } + + Modal.prototype.backdrop = function (callback) { + var that = this + var animate = this.$element.hasClass('fade') ? 'fade' : '' + + if (this.isShown && this.options.backdrop) { + var doAnimate = $.support.transition && animate + + this.$backdrop = $(document.createElement('div')) + .addClass('modal-backdrop ' + animate) + .appendTo(this.$body) + + this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) { + if (this.ignoreBackdropClick) { + this.ignoreBackdropClick = false + return + } + if (e.target !== e.currentTarget) return + this.options.backdrop == 'static' + ? 
this.$element[0].focus() + : this.hide() + }, this)) + + if (doAnimate) this.$backdrop[0].offsetWidth // force reflow + + this.$backdrop.addClass('in') + + if (!callback) return + + doAnimate ? + this.$backdrop + .one('bsTransitionEnd', callback) + .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) : + callback() + + } else if (!this.isShown && this.$backdrop) { + this.$backdrop.removeClass('in') + + var callbackRemove = function () { + that.removeBackdrop() + callback && callback() + } + $.support.transition && this.$element.hasClass('fade') ? + this.$backdrop + .one('bsTransitionEnd', callbackRemove) + .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) : + callbackRemove() + + } else if (callback) { + callback() + } + } + + // these following methods are used to handle overflowing modals + + Modal.prototype.handleUpdate = function () { + this.adjustDialog() + } + + Modal.prototype.adjustDialog = function () { + var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight + + this.$element.css({ + paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', + paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? 
this.scrollbarWidth : '' + }) + } + + Modal.prototype.resetAdjustments = function () { + this.$element.css({ + paddingLeft: '', + paddingRight: '' + }) + } + + Modal.prototype.checkScrollbar = function () { + var fullWindowWidth = window.innerWidth + if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8 + var documentElementRect = document.documentElement.getBoundingClientRect() + fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left) + } + this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth + this.scrollbarWidth = this.measureScrollbar() + } + + Modal.prototype.setScrollbar = function () { + var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) + this.originalBodyPad = document.body.style.paddingRight || '' + if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) + } + + Modal.prototype.resetScrollbar = function () { + this.$body.css('padding-right', this.originalBodyPad) + } + + Modal.prototype.measureScrollbar = function () { // thx walsh + var scrollDiv = document.createElement('div') + scrollDiv.className = 'modal-scrollbar-measure' + this.$body.append(scrollDiv) + var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth + this.$body[0].removeChild(scrollDiv) + return scrollbarWidth + } + + + // MODAL PLUGIN DEFINITION + // ======================= + + function Plugin(option, _relatedTarget) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.modal') + var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) + + if (!data) $this.data('bs.modal', (data = new Modal(this, options))) + if (typeof option == 'string') data[option](_relatedTarget) + else if (options.show) data.show(_relatedTarget) + }) + } + + var old = $.fn.modal + + $.fn.modal = Plugin + $.fn.modal.Constructor = Modal + + + // MODAL NO CONFLICT + // ================= + + $.fn.modal.noConflict = function () { + 
$.fn.modal = old + return this + } + + + // MODAL DATA-API + // ============== + + $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { + var $this = $(this) + var href = $this.attr('href') + var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 + var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) + + if ($this.is('a')) e.preventDefault() + + $target.one('show.bs.modal', function (showEvent) { + if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown + $target.one('hidden.bs.modal', function () { + $this.is(':visible') && $this.trigger('focus') + }) + }) + Plugin.call($target, option, this) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: tooltip.js v3.3.7 + * http://getbootstrap.com/javascript/#tooltip + * Inspired by the original jQuery.tipsy by Jason Frame + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // TOOLTIP PUBLIC CLASS DEFINITION + // =============================== + + var Tooltip = function (element, options) { + this.type = null + this.options = null + this.enabled = null + this.timeout = null + this.hoverState = null + this.$element = null + this.inState = null + + this.init('tooltip', element, options) + } + + Tooltip.VERSION = '3.3.7' + + Tooltip.TRANSITION_DURATION = 150 + + Tooltip.DEFAULTS = { + animation: true, + placement: 'top', + selector: false, + template: '', + trigger: 'hover focus', + title: '', + delay: 0, + html: false, + container: false, + viewport: { + selector: 'body', + padding: 0 + } + } + + Tooltip.prototype.init = function (type, element, options) { + this.enabled = true + this.type = type + this.$element = $(element) + this.options = this.getOptions(options) + this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) + this.inState = { click: false, hover: false, focus: false } + + if (this.$element[0] instanceof document.constructor && !this.options.selector) { + throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!') + } + + var triggers = this.options.trigger.split(' ') + + for (var i = triggers.length; i--;) { + var trigger = triggers[i] + + if (trigger == 'click') { + this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this)) + } else if (trigger != 'manual') { + var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin' + var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout' + + this.$element.on(eventIn + '.' 
+ this.type, this.options.selector, $.proxy(this.enter, this)) + this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this)) + } + } + + this.options.selector ? + (this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) : + this.fixTitle() + } + + Tooltip.prototype.getDefaults = function () { + return Tooltip.DEFAULTS + } + + Tooltip.prototype.getOptions = function (options) { + options = $.extend({}, this.getDefaults(), this.$element.data(), options) + + if (options.delay && typeof options.delay == 'number') { + options.delay = { + show: options.delay, + hide: options.delay + } + } + + return options + } + + Tooltip.prototype.getDelegateOptions = function () { + var options = {} + var defaults = this.getDefaults() + + this._options && $.each(this._options, function (key, value) { + if (defaults[key] != value) options[key] = value + }) + + return options + } + + Tooltip.prototype.enter = function (obj) { + var self = obj instanceof this.constructor ? + obj : $(obj.currentTarget).data('bs.' + this.type) + + if (!self) { + self = new this.constructor(obj.currentTarget, this.getDelegateOptions()) + $(obj.currentTarget).data('bs.' + this.type, self) + } + + if (obj instanceof $.Event) { + self.inState[obj.type == 'focusin' ? 'focus' : 'hover'] = true + } + + if (self.tip().hasClass('in') || self.hoverState == 'in') { + self.hoverState = 'in' + return + } + + clearTimeout(self.timeout) + + self.hoverState = 'in' + + if (!self.options.delay || !self.options.delay.show) return self.show() + + self.timeout = setTimeout(function () { + if (self.hoverState == 'in') self.show() + }, self.options.delay.show) + } + + Tooltip.prototype.isInStateTrue = function () { + for (var key in this.inState) { + if (this.inState[key]) return true + } + + return false + } + + Tooltip.prototype.leave = function (obj) { + var self = obj instanceof this.constructor ? + obj : $(obj.currentTarget).data('bs.' 
+ this.type) + + if (!self) { + self = new this.constructor(obj.currentTarget, this.getDelegateOptions()) + $(obj.currentTarget).data('bs.' + this.type, self) + } + + if (obj instanceof $.Event) { + self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false + } + + if (self.isInStateTrue()) return + + clearTimeout(self.timeout) + + self.hoverState = 'out' + + if (!self.options.delay || !self.options.delay.hide) return self.hide() + + self.timeout = setTimeout(function () { + if (self.hoverState == 'out') self.hide() + }, self.options.delay.hide) + } + + Tooltip.prototype.show = function () { + var e = $.Event('show.bs.' + this.type) + + if (this.hasContent() && this.enabled) { + this.$element.trigger(e) + + var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0]) + if (e.isDefaultPrevented() || !inDom) return + var that = this + + var $tip = this.tip() + + var tipId = this.getUID(this.type) + + this.setContent() + $tip.attr('id', tipId) + this.$element.attr('aria-describedby', tipId) + + if (this.options.animation) $tip.addClass('fade') + + var placement = typeof this.options.placement == 'function' ? + this.options.placement.call(this, $tip[0], this.$element[0]) : + this.options.placement + + var autoToken = /\s?auto?\s?/i + var autoPlace = autoToken.test(placement) + if (autoPlace) placement = placement.replace(autoToken, '') || 'top' + + $tip + .detach() + .css({ top: 0, left: 0, display: 'block' }) + .addClass(placement) + .data('bs.' + this.type, this) + + this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element) + this.$element.trigger('inserted.bs.' + this.type) + + var pos = this.getPosition() + var actualWidth = $tip[0].offsetWidth + var actualHeight = $tip[0].offsetHeight + + if (autoPlace) { + var orgPlacement = placement + var viewportDim = this.getPosition(this.$viewport) + + placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 
'top' : + placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' : + placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' : + placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' : + placement + + $tip + .removeClass(orgPlacement) + .addClass(placement) + } + + var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight) + + this.applyPlacement(calculatedOffset, placement) + + var complete = function () { + var prevHoverState = that.hoverState + that.$element.trigger('shown.bs.' + that.type) + that.hoverState = null + + if (prevHoverState == 'out') that.leave(that) + } + + $.support.transition && this.$tip.hasClass('fade') ? + $tip + .one('bsTransitionEnd', complete) + .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) : + complete() + } + } + + Tooltip.prototype.applyPlacement = function (offset, placement) { + var $tip = this.tip() + var width = $tip[0].offsetWidth + var height = $tip[0].offsetHeight + + // manually read margins because getBoundingClientRect includes difference + var marginTop = parseInt($tip.css('margin-top'), 10) + var marginLeft = parseInt($tip.css('margin-left'), 10) + + // we must check for NaN for ie 8/9 + if (isNaN(marginTop)) marginTop = 0 + if (isNaN(marginLeft)) marginLeft = 0 + + offset.top += marginTop + offset.left += marginLeft + + // $.fn.offset doesn't round pixel values + // so we use setOffset directly with our own function B-0 + $.offset.setOffset($tip[0], $.extend({ + using: function (props) { + $tip.css({ + top: Math.round(props.top), + left: Math.round(props.left) + }) + } + }, offset), 0) + + $tip.addClass('in') + + // check to see if placing tip in new offset caused the tip to resize itself + var actualWidth = $tip[0].offsetWidth + var actualHeight = $tip[0].offsetHeight + + if (placement == 'top' && actualHeight != height) { + offset.top = offset.top + height - actualHeight + } + + var delta = 
this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight) + + if (delta.left) offset.left += delta.left + else offset.top += delta.top + + var isVertical = /top|bottom/.test(placement) + var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight + var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight' + + $tip.offset(offset) + this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical) + } + + Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) { + this.arrow() + .css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%') + .css(isVertical ? 'top' : 'left', '') + } + + Tooltip.prototype.setContent = function () { + var $tip = this.tip() + var title = this.getTitle() + + $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title) + $tip.removeClass('fade in top bottom left right') + } + + Tooltip.prototype.hide = function (callback) { + var that = this + var $tip = $(this.$tip) + var e = $.Event('hide.bs.' + this.type) + + function complete() { + if (that.hoverState != 'in') $tip.detach() + if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary. + that.$element + .removeAttr('aria-describedby') + .trigger('hidden.bs.' + that.type) + } + callback && callback() + } + + this.$element.trigger(e) + + if (e.isDefaultPrevented()) return + + $tip.removeClass('in') + + $.support.transition && $tip.hasClass('fade') ? 
+ $tip + .one('bsTransitionEnd', complete) + .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) : + complete() + + this.hoverState = null + + return this + } + + Tooltip.prototype.fixTitle = function () { + var $e = this.$element + if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') { + $e.attr('data-original-title', $e.attr('title') || '').attr('title', '') + } + } + + Tooltip.prototype.hasContent = function () { + return this.getTitle() + } + + Tooltip.prototype.getPosition = function ($element) { + $element = $element || this.$element + + var el = $element[0] + var isBody = el.tagName == 'BODY' + + var elRect = el.getBoundingClientRect() + if (elRect.width == null) { + // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 + elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) + } + var isSvg = window.SVGElement && el instanceof window.SVGElement + // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3. + // See https://github.com/twbs/bootstrap/issues/20280 + var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset()) + var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } + var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null + + return $.extend({}, elRect, scroll, outerDims, elOffset) + } + + Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) { + return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } : + placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } : + placement == 'left' ? 
{ top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } : + /* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width } + + } + + Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) { + var delta = { top: 0, left: 0 } + if (!this.$viewport) return delta + + var viewportPadding = this.options.viewport && this.options.viewport.padding || 0 + var viewportDimensions = this.getPosition(this.$viewport) + + if (/right|left/.test(placement)) { + var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll + var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight + if (topEdgeOffset < viewportDimensions.top) { // top overflow + delta.top = viewportDimensions.top - topEdgeOffset + } else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow + delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset + } + } else { + var leftEdgeOffset = pos.left - viewportPadding + var rightEdgeOffset = pos.left + viewportPadding + actualWidth + if (leftEdgeOffset < viewportDimensions.left) { // left overflow + delta.left = viewportDimensions.left - leftEdgeOffset + } else if (rightEdgeOffset > viewportDimensions.right) { // right overflow + delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset + } + } + + return delta + } + + Tooltip.prototype.getTitle = function () { + var title + var $e = this.$element + var o = this.options + + title = $e.attr('data-original-title') + || (typeof o.title == 'function' ? 
o.title.call($e[0]) : o.title) + + return title + } + + Tooltip.prototype.getUID = function (prefix) { + do prefix += ~~(Math.random() * 1000000) + while (document.getElementById(prefix)) + return prefix + } + + Tooltip.prototype.tip = function () { + if (!this.$tip) { + this.$tip = $(this.options.template) + if (this.$tip.length != 1) { + throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!') + } + } + return this.$tip + } + + Tooltip.prototype.arrow = function () { + return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow')) + } + + Tooltip.prototype.enable = function () { + this.enabled = true + } + + Tooltip.prototype.disable = function () { + this.enabled = false + } + + Tooltip.prototype.toggleEnabled = function () { + this.enabled = !this.enabled + } + + Tooltip.prototype.toggle = function (e) { + var self = this + if (e) { + self = $(e.currentTarget).data('bs.' + this.type) + if (!self) { + self = new this.constructor(e.currentTarget, this.getDelegateOptions()) + $(e.currentTarget).data('bs.' + this.type, self) + } + } + + if (e) { + self.inState.click = !self.inState.click + if (self.isInStateTrue()) self.enter(self) + else self.leave(self) + } else { + self.tip().hasClass('in') ? self.leave(self) : self.enter(self) + } + } + + Tooltip.prototype.destroy = function () { + var that = this + clearTimeout(this.timeout) + this.hide(function () { + that.$element.off('.' + that.type).removeData('bs.' 
+ that.type) + if (that.$tip) { + that.$tip.detach() + } + that.$tip = null + that.$arrow = null + that.$viewport = null + that.$element = null + }) + } + + + // TOOLTIP PLUGIN DEFINITION + // ========================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.tooltip') + var options = typeof option == 'object' && option + + if (!data && /destroy|hide/.test(option)) return + if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.tooltip + + $.fn.tooltip = Plugin + $.fn.tooltip.Constructor = Tooltip + + + // TOOLTIP NO CONFLICT + // =================== + + $.fn.tooltip.noConflict = function () { + $.fn.tooltip = old + return this + } + +}(jQuery); + +/* ======================================================================== + * Bootstrap: popover.js v3.3.7 + * http://getbootstrap.com/javascript/#popovers + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // POPOVER PUBLIC CLASS DEFINITION + // =============================== + + var Popover = function (element, options) { + this.init('popover', element, options) + } + + if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') + + Popover.VERSION = '3.3.7' + + Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { + placement: 'right', + trigger: 'click', + content: '', + template: '' + }) + + + // NOTE: POPOVER EXTENDS tooltip.js + // ================================ + + Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype) + + Popover.prototype.constructor = Popover + + Popover.prototype.getDefaults = function () { + return Popover.DEFAULTS + } + + Popover.prototype.setContent = function () { + var $tip = this.tip() + var title = this.getTitle() + var content = this.getContent() + + $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title) + $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events + this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text' + ](content) + + $tip.removeClass('fade top bottom left right in') + + // IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do + // this manually by checking the contents. + if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide() + } + + Popover.prototype.hasContent = function () { + return this.getTitle() || this.getContent() + } + + Popover.prototype.getContent = function () { + var $e = this.$element + var o = this.options + + return $e.attr('data-content') + || (typeof o.content == 'function' ? 
+ o.content.call($e[0]) : + o.content) + } + + Popover.prototype.arrow = function () { + return (this.$arrow = this.$arrow || this.tip().find('.arrow')) + } + + + // POPOVER PLUGIN DEFINITION + // ========================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.popover') + var options = typeof option == 'object' && option + + if (!data && /destroy|hide/.test(option)) return + if (!data) $this.data('bs.popover', (data = new Popover(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.popover + + $.fn.popover = Plugin + $.fn.popover.Constructor = Popover + + + // POPOVER NO CONFLICT + // =================== + + $.fn.popover.noConflict = function () { + $.fn.popover = old + return this + } + +}(jQuery); + +/* ======================================================================== + * Bootstrap: scrollspy.js v3.3.7 + * http://getbootstrap.com/javascript/#scrollspy + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // SCROLLSPY CLASS DEFINITION + // ========================== + + function ScrollSpy(element, options) { + this.$body = $(document.body) + this.$scrollElement = $(element).is(document.body) ? 
$(window) : $(element) + this.options = $.extend({}, ScrollSpy.DEFAULTS, options) + this.selector = (this.options.target || '') + ' .nav li > a' + this.offsets = [] + this.targets = [] + this.activeTarget = null + this.scrollHeight = 0 + + this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this)) + this.refresh() + this.process() + } + + ScrollSpy.VERSION = '3.3.7' + + ScrollSpy.DEFAULTS = { + offset: 10 + } + + ScrollSpy.prototype.getScrollHeight = function () { + return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight) + } + + ScrollSpy.prototype.refresh = function () { + var that = this + var offsetMethod = 'offset' + var offsetBase = 0 + + this.offsets = [] + this.targets = [] + this.scrollHeight = this.getScrollHeight() + + if (!$.isWindow(this.$scrollElement[0])) { + offsetMethod = 'position' + offsetBase = this.$scrollElement.scrollTop() + } + + this.$body + .find(this.selector) + .map(function () { + var $el = $(this) + var href = $el.data('target') || $el.attr('href') + var $href = /^#./.test(href) && $(href) + + return ($href + && $href.length + && $href.is(':visible') + && [[$href[offsetMethod]().top + offsetBase, href]]) || null + }) + .sort(function (a, b) { return a[0] - b[0] }) + .each(function () { + that.offsets.push(this[0]) + that.targets.push(this[1]) + }) + } + + ScrollSpy.prototype.process = function () { + var scrollTop = this.$scrollElement.scrollTop() + this.options.offset + var scrollHeight = this.getScrollHeight() + var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height() + var offsets = this.offsets + var targets = this.targets + var activeTarget = this.activeTarget + var i + + if (this.scrollHeight != scrollHeight) { + this.refresh() + } + + if (scrollTop >= maxScroll) { + return activeTarget != (i = targets[targets.length - 1]) && this.activate(i) + } + + if (activeTarget && scrollTop < offsets[0]) { + this.activeTarget = null + 
return this.clear() + } + + for (i = offsets.length; i--;) { + activeTarget != targets[i] + && scrollTop >= offsets[i] + && (offsets[i + 1] === undefined || scrollTop < offsets[i + 1]) + && this.activate(targets[i]) + } + } + + ScrollSpy.prototype.activate = function (target) { + this.activeTarget = target + + this.clear() + + var selector = this.selector + + '[data-target="' + target + '"],' + + this.selector + '[href="' + target + '"]' + + var active = $(selector) + .parents('li') + .addClass('active') + + if (active.parent('.dropdown-menu').length) { + active = active + .closest('li.dropdown') + .addClass('active') + } + + active.trigger('activate.bs.scrollspy') + } + + ScrollSpy.prototype.clear = function () { + $(this.selector) + .parentsUntil(this.options.target, '.active') + .removeClass('active') + } + + + // SCROLLSPY PLUGIN DEFINITION + // =========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.scrollspy') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.scrollspy + + $.fn.scrollspy = Plugin + $.fn.scrollspy.Constructor = ScrollSpy + + + // SCROLLSPY NO CONFLICT + // ===================== + + $.fn.scrollspy.noConflict = function () { + $.fn.scrollspy = old + return this + } + + + // SCROLLSPY DATA-API + // ================== + + $(window).on('load.bs.scrollspy.data-api', function () { + $('[data-spy="scroll"]').each(function () { + var $spy = $(this) + Plugin.call($spy, $spy.data()) + }) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: tab.js v3.3.7 + * http://getbootstrap.com/javascript/#tabs + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // TAB CLASS DEFINITION + // ==================== + + var Tab = function (element) { + // jscs:disable requireDollarBeforejQueryAssignment + this.element = $(element) + // jscs:enable requireDollarBeforejQueryAssignment + } + + Tab.VERSION = '3.3.7' + + Tab.TRANSITION_DURATION = 150 + + Tab.prototype.show = function () { + var $this = this.element + var $ul = $this.closest('ul:not(.dropdown-menu)') + var selector = $this.data('target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + if ($this.parent('li').hasClass('active')) return + + var $previous = $ul.find('.active:last a') + var hideEvent = $.Event('hide.bs.tab', { + relatedTarget: $this[0] + }) + var showEvent = $.Event('show.bs.tab', { + relatedTarget: $previous[0] + }) + + $previous.trigger(hideEvent) + $this.trigger(showEvent) + + if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return + + var $target = $(selector) + + this.activate($this.closest('li'), $ul) + this.activate($target, $target.parent(), function () { + $previous.trigger({ + type: 'hidden.bs.tab', + relatedTarget: $this[0] + }) + $this.trigger({ + type: 'shown.bs.tab', + relatedTarget: $previous[0] + }) + }) + } + + Tab.prototype.activate = function (element, container, callback) { + var $active = container.find('> .active') + var transition = callback + && $.support.transition + && ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length) + + function next() { + $active + .removeClass('active') + .find('> .dropdown-menu > .active') + .removeClass('active') + .end() + .find('[data-toggle="tab"]') + .attr('aria-expanded', false) + + element + .addClass('active') + .find('[data-toggle="tab"]') + 
.attr('aria-expanded', true) + + if (transition) { + element[0].offsetWidth // reflow for transition + element.addClass('in') + } else { + element.removeClass('fade') + } + + if (element.parent('.dropdown-menu').length) { + element + .closest('li.dropdown') + .addClass('active') + .end() + .find('[data-toggle="tab"]') + .attr('aria-expanded', true) + } + + callback && callback() + } + + $active.length && transition ? + $active + .one('bsTransitionEnd', next) + .emulateTransitionEnd(Tab.TRANSITION_DURATION) : + next() + + $active.removeClass('in') + } + + + // TAB PLUGIN DEFINITION + // ===================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.tab') + + if (!data) $this.data('bs.tab', (data = new Tab(this))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.tab + + $.fn.tab = Plugin + $.fn.tab.Constructor = Tab + + + // TAB NO CONFLICT + // =============== + + $.fn.tab.noConflict = function () { + $.fn.tab = old + return this + } + + + // TAB DATA-API + // ============ + + var clickHandler = function (e) { + e.preventDefault() + Plugin.call($(this), 'show') + } + + $(document) + .on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler) + .on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: affix.js v3.3.7 + * http://getbootstrap.com/javascript/#affix + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // AFFIX CLASS DEFINITION + // ====================== + + var Affix = function (element, options) { + this.options = $.extend({}, Affix.DEFAULTS, options) + + this.$target = $(this.options.target) + .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) + .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) + + this.$element = $(element) + this.affixed = null + this.unpin = null + this.pinnedOffset = null + + this.checkPosition() + } + + Affix.VERSION = '3.3.7' + + Affix.RESET = 'affix affix-top affix-bottom' + + Affix.DEFAULTS = { + offset: 0, + target: window + } + + Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) { + var scrollTop = this.$target.scrollTop() + var position = this.$element.offset() + var targetHeight = this.$target.height() + + if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false + + if (this.affixed == 'bottom') { + if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom' + return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom' + } + + var initializing = this.affixed == null + var colliderTop = initializing ? scrollTop : position.top + var colliderHeight = initializing ? 
targetHeight : height + + if (offsetTop != null && scrollTop <= offsetTop) return 'top' + if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom' + + return false + } + + Affix.prototype.getPinnedOffset = function () { + if (this.pinnedOffset) return this.pinnedOffset + this.$element.removeClass(Affix.RESET).addClass('affix') + var scrollTop = this.$target.scrollTop() + var position = this.$element.offset() + return (this.pinnedOffset = position.top - scrollTop) + } + + Affix.prototype.checkPositionWithEventLoop = function () { + setTimeout($.proxy(this.checkPosition, this), 1) + } + + Affix.prototype.checkPosition = function () { + if (!this.$element.is(':visible')) return + + var height = this.$element.height() + var offset = this.options.offset + var offsetTop = offset.top + var offsetBottom = offset.bottom + var scrollHeight = Math.max($(document).height(), $(document.body).height()) + + if (typeof offset != 'object') offsetBottom = offsetTop = offset + if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element) + if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element) + + var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom) + + if (this.affixed != affix) { + if (this.unpin != null) this.$element.css('top', '') + + var affixType = 'affix' + (affix ? '-' + affix : '') + var e = $.Event(affixType + '.bs.affix') + + this.$element.trigger(e) + + if (e.isDefaultPrevented()) return + + this.affixed = affix + this.unpin = affix == 'bottom' ? 
this.getPinnedOffset() : null + + this.$element + .removeClass(Affix.RESET) + .addClass(affixType) + .trigger(affixType.replace('affix', 'affixed') + '.bs.affix') + } + + if (affix == 'bottom') { + this.$element.offset({ + top: scrollHeight - height - offsetBottom + }) + } + } + + + // AFFIX PLUGIN DEFINITION + // ======================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.affix') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.affix', (data = new Affix(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.affix + + $.fn.affix = Plugin + $.fn.affix.Constructor = Affix + + + // AFFIX NO CONFLICT + // ================= + + $.fn.affix.noConflict = function () { + $.fn.affix = old + return this + } + + + // AFFIX DATA-API + // ============== + + $(window).on('load', function () { + $('[data-spy="affix"]').each(function () { + var $spy = $(this) + var data = $spy.data() + + data.offset = data.offset || {} + + if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom + if (data.offsetTop != null) data.offset.top = data.offsetTop + + Plugin.call($spy, data) + }) + }) + +}(jQuery); diff --git a/js/jquery.1.11.1.js b/js/jquery.1.11.1.js new file mode 100644 index 0000000..ab28a24 --- /dev/null +++ b/js/jquery.1.11.1.js @@ -0,0 +1,4 @@ +/*! jQuery v1.11.1 | (c) 2005, 2014 jQuery Foundation, Inc. 
| jquery.org/license */ +!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.1",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new 
Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return 
e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b=a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+-new Date,v=a.document,w=0,x=0,y=gb(),z=gb(),A=gb(),B=function(a,b){return a===b&&(l=!0),0},C="undefined",D=1<<31,E={}.hasOwnProperty,F=[],G=F.pop,H=F.push,I=F.push,J=F.slice,K=F.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return b;return-1},L="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",N="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=N.replace("w","w#"),P="\\["+M+"*("+N+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+O+"))|)"+M+"*\\]",Q=":("+N+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+P+")*)|.*)\\)|)",R=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),S=new RegExp("^"+M+"*,"+M+"*"),T=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),V=new RegExp(Q),W=new RegExp("^"+O+"$"),X={ID:new RegExp("^#("+N+")"),CLASS:new RegExp("^\\.("+N+")"),TAG:new RegExp("^("+N.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+Q),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+L+")$","i"),needsContext:new 
RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{I.apply(F=J.call(v.childNodes),v.childNodes),F[v.childNodes.length].nodeType}catch(eb){I={apply:F.length?function(a,b){H.apply(a,J.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],!a||"string"!=typeof a)return d;if(1!==(k=b.nodeType)&&9!==k)return[];if(p&&!e){if(f=_.exec(a))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return I.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName&&b.getElementsByClassName)return I.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=9===k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+qb(o[l]);w=ab.test(a)&&ob(b.parentNode)||b,x=o.join(",")}if(x)try{return I.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function gb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function hb(a){return a[u]=!0,a}function ib(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function jb(a,b){var 
c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function kb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||D)-(~a.sourceIndex||D);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function lb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function nb(a){return hb(function(b){return b=+b,hb(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function ob(a){return a&&typeof a.getElementsByTagName!==C&&a}c=fb.support={},f=fb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fb.setDocument=function(a){var b,e=a?a.ownerDocument||a:v,g=e.defaultView;return e!==n&&9===e.nodeType&&e.documentElement?(n=e,o=e.documentElement,p=!f(e),g&&g!==g.top&&(g.addEventListener?g.addEventListener("unload",function(){m()},!1):g.attachEvent&&g.attachEvent("onunload",function(){m()})),c.attributes=ib(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ib(function(a){return a.appendChild(e.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(e.getElementsByClassName)&&ib(function(a){return a.innerHTML="
",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),c.getById=ib(function(a){return o.appendChild(a).id=u,!e.getElementsByName||!e.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if(typeof b.getElementById!==C&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c=typeof a.getAttributeNode!==C&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==C?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==C&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(e.querySelectorAll))&&(ib(function(a){a.innerHTML="",a.querySelectorAll("[msallowclip^='']").length&&q.push("[*^$]="+M+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+M+"*(?:value|"+L+")"),a.querySelectorAll(":checked").length||q.push(":checked")}),ib(function(a){var b=e.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+M+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ib(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",Q)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return 
a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===e||a.ownerDocument===v&&t(v,a)?-1:b===e||b.ownerDocument===v&&t(v,b)?1:k?K.call(k,a)-K.call(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,f=a.parentNode,g=b.parentNode,h=[a],i=[b];if(!f||!g)return a===e?-1:b===e?1:f?-1:g?1:k?K.call(k,a)-K.call(k,b):0;if(f===g)return kb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?kb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},e):n},fb.matches=function(a,b){return fb(a,null,null,b)},fb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fb(b,n,null,[a]).length>0},fb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&E.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return 
a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fb.selectors={cacheLength:50,createPseudo:hb,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+M+")"+a+"("+M+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||typeof a.getAttribute!==C&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var 
j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?hb(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=K.call(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:hb(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?hb(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:hb(function(a){return function(b){return fb(a,b).length>0}}),contains:hb(function(a){return function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:hb(function(a){return W.test(a||"")||fb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return 
a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:nb(function(){return[0]}),last:nb(function(a,b){return[b-1]}),eq:nb(function(a,b,c){return[0>c?c+b:c]}),even:nb(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:nb(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:nb(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:nb(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function rb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function sb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function tb(a,b,c){for(var d=0,e=b.length;e>d;d++)fb(a,b[d],c);return c}function ub(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function vb(a,b,c,d,e,f){return d&&!d[u]&&(d=vb(d)),e&&!e[u]&&(e=vb(e,f)),hb(function(f,g,h,i){var 
j,k,l,m=[],n=[],o=g.length,p=f||tb(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ub(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ub(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?K.call(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ub(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):I.apply(g,r)})}function wb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=rb(function(a){return a===b},h,!0),l=rb(function(a){return K.call(b,a)>-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d))}];f>i;i++)if(c=d.relative[a[i].type])m=[rb(sb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return vb(i>1&&sb(m),i>1&&qb(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&wb(a.slice(i,e)),f>e&&wb(a=a.slice(e)),f>e&&qb(a))}m.push(c)}return sb(m)}function xb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=G.call(i));s=ub(s)}I.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&fb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?hb(f):f}return h=fb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xb(e,d)),f.selector=a}return f},i=fb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof 
a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&ob(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qb(j),!a)return I.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&ob(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ib(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ib(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||jb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ib(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||jb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ib(function(a){return null==a.getAttribute("disabled")})||jb(L,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fb}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return 
c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return 
this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var 
b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(c=a.memory&&l,d=!0,f=g||0,g=0,e=h.length,b=!0;h&&e>f;f++)if(h[f].apply(l[0],l[1])===!1&&a.stopOnFalse){c=!1;break}b=!1,h&&(i?i.length&&j(i.shift()):c?h=[]:k.disable())},k={add:function(){if(h){var d=h.length;!function f(b){m.each(b,function(b,c){var d=m.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),b?e=h.length:c&&(g=d,j(c))}return this},remove:function(){return h&&m.each(arguments,function(a,c){var d;while((d=m.inArray(c,h,d))>-1)h.splice(d,1),b&&(e>=d&&e--,f>=d&&f--)}),this},has:function(a){return a?m.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],e=0,this},disable:function(){return h=i=c=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,c||k.disable(),this},locked:function(){return!i},fireWith:function(a,c){return!h||d&&!i||(c=c||[],c=[a,c.slice?c.slice():c],b?i.push(c):j(c)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!d}};return k},m.extend({Deferred:function(a){var b=[["resolve","done",m.Callbacks("once memory"),"resolved"],["reject","fail",m.Callbacks("once memory"),"rejected"],["notify","progress",m.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return m.Deferred(function(c){m.each(b,function(b,f){var g=m.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&m.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?m.extend(a,d):d}},e={};return d.pipe=d.then,m.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return 
e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&m.isFunction(a.promise)?e:0,g=1===f?a:m.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&m.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;m.fn.ready=function(a){return m.ready.promise().done(a),this},m.extend({isReady:!1,readyWait:1,holdReady:function(a){a?m.readyWait++:m.ready(!0)},ready:function(a){if(a===!0?!--m.readyWait:!m.isReady){if(!y.body)return setTimeout(m.ready);m.isReady=!0,a!==!0&&--m.readyWait>0||(H.resolveWith(y,[m]),m.fn.triggerHandler&&(m(y).triggerHandler("ready"),m(y).off("ready")))}}});function I(){y.addEventListener?(y.removeEventListener("DOMContentLoaded",J,!1),a.removeEventListener("load",J,!1)):(y.detachEvent("onreadystatechange",J),a.detachEvent("onload",J))}function J(){(y.addEventListener||"load"===event.type||"complete"===y.readyState)&&(I(),m.ready())}m.ready.promise=function(b){if(!H)if(H=m.Deferred(),"complete"===y.readyState)setTimeout(m.ready);else if(y.addEventListener)y.addEventListener("DOMContentLoaded",J,!1),a.addEventListener("load",J,!1);else{y.attachEvent("onreadystatechange",J),a.attachEvent("onload",J);var c=!1;try{c=null==a.frameElement&&y.documentElement}catch(d){}c&&c.doScroll&&!function e(){if(!m.isReady){try{c.doScroll("left")}catch(a){return setTimeout(e,50)}I(),m.ready()}}()}return H.promise(b)};var K="undefined",L;for(L in m(k))break;k.ownLast="0"!==L,k.inlineBlockNeedsLayout=!1,m(function(){var 
a,b,c,d;c=y.getElementsByTagName("body")[0],c&&c.style&&(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),typeof b.style.zoom!==K&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",k.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(d))}),function(){var a=y.createElement("div");if(null==k.deleteExpando){k.deleteExpando=!0;try{delete a.test}catch(b){k.deleteExpando=!1}}a=null}(),m.acceptData=function(a){var b=m.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b};var M=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,N=/([A-Z])/g;function O(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(N,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:M.test(c)?m.parseJSON(c):c}catch(e){}m.data(a,b,c)}else c=void 0}return c}function P(a){var b;for(b in a)if(("data"!==b||!m.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function Q(a,b,d,e){if(m.acceptData(a)){var f,g,h=m.expando,i=a.nodeType,j=i?m.cache:a,k=i?a[h]:a[h]&&h; +if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||m.guid++:h),j[k]||(j[k]=i?{}:{toJSON:m.noop}),("object"==typeof b||"function"==typeof b)&&(e?j[k]=m.extend(j[k],b):j[k].data=m.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[m.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[m.camelCase(b)])):f=g,f}}function R(a,b,c){if(m.acceptData(a)){var d,e,f=a.nodeType,g=f?m.cache:a,h=f?a[m.expando]:m.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){m.isArray(b)?b=b.concat(m.map(b,m.camelCase)):b in d?b=[b]:(b=m.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!P(d):!m.isEmptyObject(d))return}(c||(delete 
g[h].data,P(g[h])))&&(f?m.cleanData([a],!0):k.deleteExpando||g!=g.window?delete g[h]:g[h]=null)}}}m.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?m.cache[a[m.expando]]:a[m.expando],!!a&&!P(a)},data:function(a,b,c){return Q(a,b,c)},removeData:function(a,b){return R(a,b)},_data:function(a,b,c){return Q(a,b,c,!0)},_removeData:function(a,b){return R(a,b,!0)}}),m.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=m.data(f),1===f.nodeType&&!m._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=m.camelCase(d.slice(5)),O(f,d,e[d])));m._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){m.data(this,a)}):arguments.length>1?this.each(function(){m.data(this,a,b)}):f?O(f,a,m.data(f,a)):void 0},removeData:function(a){return this.each(function(){m.removeData(this,a)})}}),m.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=m._data(a,b),c&&(!d||m.isArray(c)?d=m._data(a,b,m.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=m.queue(a,b),d=c.length,e=c.shift(),f=m._queueHooks(a,b),g=function(){m.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return m._data(a,c)||m._data(a,c,{empty:m.Callbacks("once memory").add(function(){m._removeData(a,b+"queue"),m._removeData(a,c)})})}}),m.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.lengthh;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},W=/^(?:checkbox|radio)$/i;!function(){var a=y.createElement("input"),b=y.createElement("div"),c=y.createDocumentFragment();if(b.innerHTML="
a",k.leadingWhitespace=3===b.firstChild.nodeType,k.tbody=!b.getElementsByTagName("tbody").length,k.htmlSerialize=!!b.getElementsByTagName("link").length,k.html5Clone="<:nav>"!==y.createElement("nav").cloneNode(!0).outerHTML,a.type="checkbox",a.checked=!0,c.appendChild(a),k.appendChecked=a.checked,b.innerHTML="",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue,c.appendChild(b),b.innerHTML="",k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,k.noCloneEvent=!0,b.attachEvent&&(b.attachEvent("onclick",function(){k.noCloneEvent=!1}),b.cloneNode(!0).click()),null==k.deleteExpando){k.deleteExpando=!0;try{delete b.test}catch(d){k.deleteExpando=!1}}}(),function(){var b,c,d=y.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(k[b+"Bubbles"]=c in a)||(d.setAttribute(c,"t"),k[b+"Bubbles"]=d.attributes[c].expando===!1);d=null}();var X=/^(?:input|select|textarea)$/i,Y=/^key/,Z=/^(?:mouse|pointer|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=/^([^.]*)(?:\.(.+)|)$/;function ab(){return!0}function bb(){return!1}function cb(){try{return y.activeElement}catch(a){}}m.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=m.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return typeof m===K||a&&m.event.triggered===a.type?void 
0:m.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(E)||[""],h=b.length;while(h--)f=_.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=m.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=m.event.special[o]||{},l=m.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&m.expr.match.needsContext.test(e),namespace:p.join(".")},i),(n=g[o])||(n=g[o]=[],n.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?n.splice(n.delegateCount++,0,l):n.push(l),m.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m.hasData(a)&&m._data(a);if(r&&(k=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=_.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=m.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,n=k[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=n.length;while(f--)g=n[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(n.splice(f,1),g.selector&&n.delegateCount--,l.remove&&l.remove.call(a,g));i&&!n.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||m.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)m.event.remove(a,o+b[j],c,d,!0);m.isEmptyObject(k)&&(delete r.handle,m._removeData(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,l,n,o=[d||y],p=j.call(b,"type")?b.type:b,q=j.call(b,"namespace")?b.namespace.split("."):[];if(h=l=d=d||y,3!==d.nodeType&&8!==d.nodeType&&!$.test(p+m.event.triggered)&&(p.indexOf(".")>=0&&(q=p.split("."),p=q.shift(),q.sort()),g=p.indexOf(":")<0&&"on"+p,b=b[m.expando]?b:new m.Event(p,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=q.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+q.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 
0,b.target||(b.target=d),c=null==c?[b]:m.makeArray(c,[b]),k=m.event.special[p]||{},e||!k.trigger||k.trigger.apply(d,c)!==!1)){if(!e&&!k.noBubble&&!m.isWindow(d)){for(i=k.delegateType||p,$.test(i+p)||(h=h.parentNode);h;h=h.parentNode)o.push(h),l=h;l===(d.ownerDocument||y)&&o.push(l.defaultView||l.parentWindow||a)}n=0;while((h=o[n++])&&!b.isPropagationStopped())b.type=n>1?i:k.bindType||p,f=(m._data(h,"events")||{})[b.type]&&m._data(h,"handle"),f&&f.apply(h,c),f=g&&h[g],f&&f.apply&&m.acceptData(h)&&(b.result=f.apply(h,c),b.result===!1&&b.preventDefault());if(b.type=p,!e&&!b.isDefaultPrevented()&&(!k._default||k._default.apply(o.pop(),c)===!1)&&m.acceptData(d)&&g&&d[p]&&!m.isWindow(d)){l=d[g],l&&(d[g]=null),m.event.triggered=p;try{d[p]()}catch(r){}m.event.triggered=void 0,l&&(d[g]=l)}return b.result}},dispatch:function(a){a=m.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(m._data(this,"events")||{})[a.type]||[],k=m.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=m.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,g=0;while((e=f.handlers[g++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(e.namespace))&&(a.handleObj=e,a.data=e.data,c=((m.event.special[e.origType]||{}).handle||e.handler).apply(f.elem,i),void 0!==c&&(a.result=c)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(e=[],f=0;h>f;f++)d=b[f],c=d.selector+" ",void 0===e[c]&&(e[c]=d.needsContext?m(c,this).index(i)>=0:m.find(c,this,null,[i]).length),e[c]&&e.push(d);e.length&&g.push({elem:i,handlers:e})}return 
h]","i"),hb=/^\s+/,ib=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,jb=/<([\w:]+)/,kb=/\s*$/g,rb={option:[1,""],legend:[1,"
","
"],area:[1,"",""],param:[1,"",""],thead:[1,"","
"],tr:[2,"","
"],col:[2,"","
"],td:[3,"","
"],_default:k.htmlSerialize?[0,"",""]:[1,"X
","
"]},sb=db(y),tb=sb.appendChild(y.createElement("div"));rb.optgroup=rb.option,rb.tbody=rb.tfoot=rb.colgroup=rb.caption=rb.thead,rb.th=rb.td;function ub(a,b){var c,d,e=0,f=typeof a.getElementsByTagName!==K?a.getElementsByTagName(b||"*"):typeof a.querySelectorAll!==K?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||m.nodeName(d,b)?f.push(d):m.merge(f,ub(d,b));return void 0===b||b&&m.nodeName(a,b)?m.merge([a],f):f}function vb(a){W.test(a.type)&&(a.defaultChecked=a.checked)}function wb(a,b){return m.nodeName(a,"table")&&m.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function xb(a){return a.type=(null!==m.find.attr(a,"type"))+"/"+a.type,a}function yb(a){var b=pb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function zb(a,b){for(var c,d=0;null!=(c=a[d]);d++)m._data(c,"globalEval",!b||m._data(b[d],"globalEval"))}function Ab(a,b){if(1===b.nodeType&&m.hasData(a)){var c,d,e,f=m._data(a),g=m._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)m.event.add(b,c,h[c][d])}g.data&&(g.data=m.extend({},g.data))}}function Bb(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!k.noCloneEvent&&b[m.expando]){e=m._data(b);for(d in e.events)m.removeEvent(b,d,e.handle);b.removeAttribute(m.expando)}"script"===c&&b.text!==a.text?(xb(b).text=a.text,yb(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),k.html5Clone&&a.innerHTML&&!m.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&W.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}}m.extend({clone:function(a,b,c){var 
d,e,f,g,h,i=m.contains(a.ownerDocument,a);if(k.html5Clone||m.isXMLDoc(a)||!gb.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(tb.innerHTML=a.outerHTML,tb.removeChild(f=tb.firstChild)),!(k.noCloneEvent&&k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||m.isXMLDoc(a)))for(d=ub(f),h=ub(a),g=0;null!=(e=h[g]);++g)d[g]&&Bb(e,d[g]);if(b)if(c)for(h=h||ub(a),d=d||ub(f),g=0;null!=(e=h[g]);g++)Ab(e,d[g]);else Ab(a,f);return d=ub(f,"script"),d.length>0&&zb(d,!i&&ub(a,"script")),d=h=e=null,f},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,l,n=a.length,o=db(b),p=[],q=0;n>q;q++)if(f=a[q],f||0===f)if("object"===m.type(f))m.merge(p,f.nodeType?[f]:f);else if(lb.test(f)){h=h||o.appendChild(b.createElement("div")),i=(jb.exec(f)||["",""])[1].toLowerCase(),l=rb[i]||rb._default,h.innerHTML=l[1]+f.replace(ib,"<$1>")+l[2],e=l[0];while(e--)h=h.lastChild;if(!k.leadingWhitespace&&hb.test(f)&&p.push(b.createTextNode(hb.exec(f)[0])),!k.tbody){f="table"!==i||kb.test(f)?""!==l[1]||kb.test(f)?0:h:h.firstChild,e=f&&f.childNodes.length;while(e--)m.nodeName(j=f.childNodes[e],"tbody")&&!j.childNodes.length&&f.removeChild(j)}m.merge(p,h.childNodes),h.textContent="";while(h.firstChild)h.removeChild(h.firstChild);h=o.lastChild}else p.push(b.createTextNode(f));h&&o.removeChild(h),k.appendChecked||m.grep(ub(p,"input"),vb),q=0;while(f=p[q++])if((!d||-1===m.inArray(f,d))&&(g=m.contains(f.ownerDocument,f),h=ub(o.appendChild(f),"script"),g&&zb(h),c)){e=0;while(f=h[e++])ob.test(f.type||"")&&c.push(f)}return h=null,o},cleanData:function(a,b){for(var d,e,f,g,h=0,i=m.expando,j=m.cache,l=k.deleteExpando,n=m.event.special;null!=(d=a[h]);h++)if((b||m.acceptData(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in g.events)n[e]?m.event.remove(d,e):m.removeEvent(d,e,g.handle);j[f]&&(delete j[f],l?delete d[i]:typeof d.removeAttribute!==K?d.removeAttribute(i):d[i]=null,c.push(f))}}}),m.fn.extend({text:function(a){return V(this,function(a){return void 
0===a?m.text(this):this.empty().append((this[0]&&this[0].ownerDocument||y).createTextNode(a))},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?m.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||m.cleanData(ub(c)),c.parentNode&&(b&&m.contains(c.ownerDocument,c)&&zb(ub(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&m.cleanData(ub(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&m.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return m.clone(this,a,b)})},html:function(a){return V(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(fb,""):void 0;if(!("string"!=typeof a||mb.test(a)||!k.htmlSerialize&&gb.test(a)||!k.leadingWhitespace&&hb.test(a)||rb[(jb.exec(a)||["",""])[1].toLowerCase()])){a=a.replace(ib,"<$1>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(m.cleanData(ub(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,m.cleanData(ub(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return 
this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,n=this,o=l-1,p=a[0],q=m.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&nb.test(p))return this.each(function(c){var d=n.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(i=m.buildFragment(a,this[0].ownerDocument,!1,this),c=i.firstChild,1===i.childNodes.length&&(i=c),c)){for(g=m.map(ub(i,"script"),xb),f=g.length;l>j;j++)d=i,j!==o&&(d=m.clone(d,!0,!0),f&&m.merge(g,ub(d,"script"))),b.call(this[j],d,j);if(f)for(h=g[g.length-1].ownerDocument,m.map(g,yb),j=0;f>j;j++)d=g[j],ob.test(d.type||"")&&!m._data(d,"globalEval")&&m.contains(h,d)&&(d.src?m._evalUrl&&m._evalUrl(d.src):m.globalEval((d.text||d.textContent||d.innerHTML||"").replace(qb,"")));i=c=null}return this}}),m.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){m.fn[a]=function(a){for(var c,d=0,e=[],g=m(a),h=g.length-1;h>=d;d++)c=d===h?this:this.clone(!0),m(g[d])[b](c),f.apply(e,c.get());return this.pushStack(e)}});var Cb,Db={};function Eb(b,c){var d,e=m(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:m.css(e[0],"display");return e.detach(),f}function Fb(a){var b=y,c=Db[a];return c||(c=Eb(a,b),"none"!==c&&c||(Cb=(Cb||m("