From b2a77f06b9be50fd4ffd0590806b34ca52c26884 Mon Sep 17 00:00:00 2001
From: Sam Judelson <64875465+sjud@users.noreply.github.com>
Date: Mon, 6 May 2024 08:48:09 -0400
Subject: [PATCH] projects: OpenAPI Utopia (#2556)
---
.../openapi-openai-api-swagger-ui/.gitignore | 15 +
.../openapi-openai-api-swagger-ui/Cargo.toml | 117 ++++++++
.../openapi-openai-api-swagger-ui/LICENSE | 24 ++
.../openapi-openai-api-swagger-ui/README.md | 15 +
.../end2end/package-lock.json | 74 +++++
.../end2end/package.json | 13 +
.../end2end/playwright.config.ts | 107 +++++++
.../end2end/tests/example.spec.ts | 9 +
.../public/favicon.ico | Bin 0 -> 15406 bytes
.../rust-toolchain.toml | 3 +
.../openapi-openai-api-swagger-ui/src/app.rs | 174 ++++++++++++
.../src/error_template.rs | 72 +++++
.../src/fileserv.rs | 42 +++
.../openapi-openai-api-swagger-ui/src/lib.rs | 27 ++
.../openapi-openai-api-swagger-ui/src/main.rs | 42 +++
.../src/open_ai.rs | 267 ++++++++++++++++++
.../style/main.scss | 4 +
17 files changed, 1005 insertions(+)
create mode 100644 projects/openapi-openai-api-swagger-ui/.gitignore
create mode 100644 projects/openapi-openai-api-swagger-ui/Cargo.toml
create mode 100644 projects/openapi-openai-api-swagger-ui/LICENSE
create mode 100644 projects/openapi-openai-api-swagger-ui/README.md
create mode 100644 projects/openapi-openai-api-swagger-ui/end2end/package-lock.json
create mode 100644 projects/openapi-openai-api-swagger-ui/end2end/package.json
create mode 100644 projects/openapi-openai-api-swagger-ui/end2end/playwright.config.ts
create mode 100644 projects/openapi-openai-api-swagger-ui/end2end/tests/example.spec.ts
create mode 100644 projects/openapi-openai-api-swagger-ui/public/favicon.ico
create mode 100644 projects/openapi-openai-api-swagger-ui/rust-toolchain.toml
create mode 100644 projects/openapi-openai-api-swagger-ui/src/app.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/src/error_template.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/src/fileserv.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/src/lib.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/src/main.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/src/open_ai.rs
create mode 100644 projects/openapi-openai-api-swagger-ui/style/main.scss
diff --git a/projects/openapi-openai-api-swagger-ui/.gitignore b/projects/openapi-openai-api-swagger-ui/.gitignore
new file mode 100644
index 000000000..81250b95c
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/.gitignore
@@ -0,0 +1,15 @@
+# Generated by Cargo
+# will have compiled files and executables
+/target/
+pkg
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
+
+# node e2e test tools and outputs
+node_modules/
+test-results/
+end2end/playwright-report/
+playwright/.cache/
+
+.secret_key
\ No newline at end of file
diff --git a/projects/openapi-openai-api-swagger-ui/Cargo.toml b/projects/openapi-openai-api-swagger-ui/Cargo.toml
new file mode 100644
index 000000000..9a21d8ccc
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/Cargo.toml
@@ -0,0 +1,117 @@
+[package]
+name = "openapi-openai-api-swagger-ui"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib", "rlib"]
+
+[dependencies]
+axum = { version = "0.7", optional = true }
+console_error_panic_hook = "0.1"
+leptos = { version = "0.6", features = ["nightly"] }
+leptos_axum = { version = "0.6", optional = true }
+leptos_meta = { version = "0.6", features = ["nightly"] }
+leptos_router = { version = "0.6", features = ["nightly"] }
+tokio = { version = "1", features = ["rt-multi-thread"], optional = true }
+tower = { version = "0.4", optional = true }
+tower-http = { version = "0.5", features = ["fs"], optional = true }
+wasm-bindgen = "=0.2.92"
+thiserror = "1"
+tracing = { version = "0.1", optional = true }
+utoipa = { version = "4.2.0", optional = true, features=["debug"] }
+utoipa-swagger-ui = { version = "6.0.0", optional = true , features = ["axum"]}
+http = "1"
+serde = "1.0.198"
+serde_json = {version = "1.0.116", optional = true}
+openai_dive = {version="0.4.7",optional=true}
+reqwest = "0.12.4"
+uuid = { version = "1.8.0", features = ["v4"]}
+
+[features]
+hydrate = ["leptos/hydrate", "leptos_meta/hydrate", "leptos_router/hydrate"]
+ssr = [
+ "dep:openai_dive",
+ "dep:serde_json",
+ "dep:utoipa-swagger-ui",
+ "dep:utoipa",
+ "dep:axum",
+ "dep:tokio",
+ "dep:tower",
+ "dep:tower-http",
+ "dep:leptos_axum",
+ "leptos/ssr",
+ "leptos_meta/ssr",
+ "leptos_router/ssr",
+ "dep:tracing",
+]
+
+# Defines a size-optimized profile for the WASM bundle in release mode
+[profile.wasm-release]
+inherits = "release"
+opt-level = 'z'
+lto = true
+codegen-units = 1
+panic = "abort"
+
+[package.metadata.leptos]
+# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
+output-name = "openapi-swagger-ui"
+
+# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
+site-root = "target/site"
+
+# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
+# Defaults to pkg
+site-pkg-dir = "pkg"
+
+# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to //app.css
+style-file = "style/main.scss"
+# Assets source dir. All files found here will be copied and synchronized to site-root.
+# The assets-dir cannot have a sub directory with the same name/path as site-pkg-dir.
+#
+# Optional. Env: LEPTOS_ASSETS_DIR.
+assets-dir = "public"
+
+# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
+site-addr = "127.0.0.1:3000"
+
+# The port to use for automatic reload monitoring
+reload-port = 3001
+
+# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
+# [Windows] for non-WSL use "npx.cmd playwright test"
+# This binary name can be checked in Powershell with Get-Command npx
+end2end-cmd = "npx playwright test"
+end2end-dir = "end2end"
+
+# The browserslist query used for optimizing the CSS.
+browserquery = "defaults"
+
+# The environment Leptos will run in, usually either "DEV" or "PROD"
+env = "DEV"
+
+# The features to use when compiling the bin target
+#
+# Optional. Can be over-ridden with the command line parameter --bin-features
+bin-features = ["ssr"]
+
+# If the --no-default-features flag should be used when compiling the bin target
+#
+# Optional. Defaults to false.
+bin-default-features = false
+
+# The features to use when compiling the lib target
+#
+# Optional. Can be over-ridden with the command line parameter --lib-features
+lib-features = ["hydrate"]
+
+# If the --no-default-features flag should be used when compiling the lib target
+#
+# Optional. Defaults to false.
+lib-default-features = false
+
+# The profile to use for the lib target when compiling for release
+#
+# Optional. Defaults to "release".
+lib-profile-release = "wasm-release"
diff --git a/projects/openapi-openai-api-swagger-ui/LICENSE b/projects/openapi-openai-api-swagger-ui/LICENSE
new file mode 100644
index 000000000..fdddb29aa
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/LICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <https://unlicense.org>
diff --git a/projects/openapi-openai-api-swagger-ui/README.md b/projects/openapi-openai-api-swagger-ui/README.md
new file mode 100644
index 000000000..b10038c64
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/README.md
@@ -0,0 +1,15 @@
+# OpenAPI Swagger UI OpenAI GPT
+
+This example shows how to document server functions via OpenAPI schema generated using Utoipa and serve the swagger ui via /swagger-ui endpoint. More than that, this example shows how to take said OpenAPI spec and turn it into a function list to feed to OpenAI's chat completion endpoint to generate the JSON values to feed back into our server functions.
+
+The example shows an input and if you tell it to do something that is covered, say hello, or generate a list of names it will do that.
+
+To use the AI part of this project, provide your OpenAI API key in an environment variable when running cargo leptos.
+
+```sh
+OPENAI_API_KEY=my_secret_key cargo leptos serve
+```
+
+
+## Thoughts, Feedback, Criticism, Comments?
+Send me any of the above, I'm @sjud on leptos discord. I'm always looking to improve and make these projects more helpful for the community. So please let me know how I can do that. Thanks!
\ No newline at end of file
diff --git a/projects/openapi-openai-api-swagger-ui/end2end/package-lock.json b/projects/openapi-openai-api-swagger-ui/end2end/package-lock.json
new file mode 100644
index 000000000..f12af4425
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/end2end/package-lock.json
@@ -0,0 +1,74 @@
+{
+ "name": "end2end",
+ "version": "1.0.0",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "end2end",
+ "version": "1.0.0",
+ "license": "ISC",
+ "devDependencies": {
+ "@playwright/test": "^1.28.0"
+ }
+ },
+ "node_modules/@playwright/test": {
+ "version": "1.28.0",
+ "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.28.0.tgz",
+ "integrity": "sha512-vrHs5DFTPwYox5SGKq/7TDn/S4q6RA1zArd7uhO6EyP9hj3XgZBBM12ktMbnDQNxh/fL1IUKsTNLxihmsU38lQ==",
+ "dev": true,
+ "dependencies": {
+ "@types/node": "*",
+ "playwright-core": "1.28.0"
+ },
+ "bin": {
+ "playwright": "cli.js"
+ },
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "18.11.9",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz",
+ "integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==",
+ "dev": true
+ },
+ "node_modules/playwright-core": {
+ "version": "1.28.0",
+ "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.28.0.tgz",
+ "integrity": "sha512-nJLknd28kPBiCNTbqpu6Wmkrh63OEqJSFw9xOfL9qxfNwody7h6/L3O2dZoWQ6Oxcm0VOHjWmGiCUGkc0X3VZA==",
+ "dev": true,
+ "bin": {
+ "playwright": "cli.js"
+ },
+ "engines": {
+ "node": ">=14"
+ }
+ }
+ },
+ "dependencies": {
+ "@playwright/test": {
+ "version": "1.28.0",
+ "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.28.0.tgz",
+ "integrity": "sha512-vrHs5DFTPwYox5SGKq/7TDn/S4q6RA1zArd7uhO6EyP9hj3XgZBBM12ktMbnDQNxh/fL1IUKsTNLxihmsU38lQ==",
+ "dev": true,
+ "requires": {
+ "@types/node": "*",
+ "playwright-core": "1.28.0"
+ }
+ },
+ "@types/node": {
+ "version": "18.11.9",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz",
+ "integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==",
+ "dev": true
+ },
+ "playwright-core": {
+ "version": "1.28.0",
+ "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.28.0.tgz",
+ "integrity": "sha512-nJLknd28kPBiCNTbqpu6Wmkrh63OEqJSFw9xOfL9qxfNwody7h6/L3O2dZoWQ6Oxcm0VOHjWmGiCUGkc0X3VZA==",
+ "dev": true
+ }
+ }
+}
diff --git a/projects/openapi-openai-api-swagger-ui/end2end/package.json b/projects/openapi-openai-api-swagger-ui/end2end/package.json
new file mode 100644
index 000000000..ed785859f
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/end2end/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "end2end",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {},
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "devDependencies": {
+ "@playwright/test": "^1.28.0"
+ }
+}
diff --git a/projects/openapi-openai-api-swagger-ui/end2end/playwright.config.ts b/projects/openapi-openai-api-swagger-ui/end2end/playwright.config.ts
new file mode 100644
index 000000000..e9891c094
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/end2end/playwright.config.ts
@@ -0,0 +1,107 @@
+import type { PlaywrightTestConfig } from "@playwright/test";
+import { devices } from "@playwright/test";
+
+/**
+ * Read environment variables from file.
+ * https://github.com/motdotla/dotenv
+ */
+// require('dotenv').config();
+
+/**
+ * See https://playwright.dev/docs/test-configuration.
+ */
+const config: PlaywrightTestConfig = {
+ testDir: "./tests",
+ /* Maximum time one test can run for. */
+ timeout: 30 * 1000,
+ expect: {
+ /**
+ * Maximum time expect() should wait for the condition to be met.
+ * For example in `await expect(locator).toHaveText();`
+ */
+ timeout: 5000,
+ },
+ /* Run tests in files in parallel */
+ fullyParallel: true,
+ /* Fail the build on CI if you accidentally left test.only in the source code. */
+ forbidOnly: !!process.env.CI,
+ /* Retry on CI only */
+ retries: process.env.CI ? 2 : 0,
+ /* Opt out of parallel tests on CI. */
+ workers: process.env.CI ? 1 : undefined,
+ /* Reporter to use. See https://playwright.dev/docs/test-reporters */
+ reporter: "html",
+ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
+ use: {
+ /* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */
+ actionTimeout: 0,
+ /* Base URL to use in actions like `await page.goto('/')`. */
+ // baseURL: 'http://localhost:3000',
+
+ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
+ trace: "on-first-retry",
+ },
+
+ /* Configure projects for major browsers */
+ projects: [
+ {
+ name: "chromium",
+ use: {
+ ...devices["Desktop Chrome"],
+ },
+ },
+
+ {
+ name: "firefox",
+ use: {
+ ...devices["Desktop Firefox"],
+ },
+ },
+
+ {
+ name: "webkit",
+ use: {
+ ...devices["Desktop Safari"],
+ },
+ },
+
+ /* Test against mobile viewports. */
+ // {
+ // name: 'Mobile Chrome',
+ // use: {
+ // ...devices['Pixel 5'],
+ // },
+ // },
+ // {
+ // name: 'Mobile Safari',
+ // use: {
+ // ...devices['iPhone 12'],
+ // },
+ // },
+
+ /* Test against branded browsers. */
+ // {
+ // name: 'Microsoft Edge',
+ // use: {
+ // channel: 'msedge',
+ // },
+ // },
+ // {
+ // name: 'Google Chrome',
+ // use: {
+ // channel: 'chrome',
+ // },
+ // },
+ ],
+
+ /* Folder for test artifacts such as screenshots, videos, traces, etc. */
+ // outputDir: 'test-results/',
+
+ /* Run your local dev server before starting the tests */
+ // webServer: {
+ // command: 'npm run start',
+ // port: 3000,
+ // },
+};
+
+export default config;
diff --git a/projects/openapi-openai-api-swagger-ui/end2end/tests/example.spec.ts b/projects/openapi-openai-api-swagger-ui/end2end/tests/example.spec.ts
new file mode 100644
index 000000000..a461f351a
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/end2end/tests/example.spec.ts
@@ -0,0 +1,9 @@
+import { test, expect } from "@playwright/test";
+
+test("homepage has title and links to intro page", async ({ page }) => {
+ await page.goto("http://localhost:3000/");
+
+ await expect(page).toHaveTitle("Welcome to Leptos");
+
+ await expect(page.locator("h1")).toHaveText("Welcome to Leptos!");
+});
diff --git a/projects/openapi-openai-api-swagger-ui/public/favicon.ico b/projects/openapi-openai-api-swagger-ui/public/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..2ba8527cb12f5f28f331b8d361eef560492d4c77
GIT binary patch
literal 15406
zcmeHOd3aPs5`TblWD*3D%tXPJ#q(n!z$P=3gCjvf#a)E}a;Uf>h{pmVih!a-5LVO`
zB?JrzEFicD0wRLo0iPfO372xnkvkzFlRHB)lcTnNZ}KK@US{UKN#b8?e_zkLy1RZ=
zT~*y(-6IICgf>E_P6A)M3(wvl2qr-gx_5Ux-_uzT*6_Q&ee1v9B?vzS3&K5IhO2N5
z$9ukLN<`G>$$|GLnga~y%>f}*j%+w@(ixVUb^1_Gjoc;(?TrD3m2)RduFblVN)uy;
zQAEd^T{5>-YYH%|Kv{V^cxHMBr1Ik7Frht$imC`rqx@5*|
z+OqN!xAjqmaU=qR$uGDMa7p!W9oZ+64($4xDk^FyFQ<_9Z`(;DLnB<;LLJD1<&vnZ
zo0(>zIkQTse}qNMb6+i`th54(3pKm8;UAJ<_BULR*Z=m5FU7jiW(l+}WkHZ|e@1
z`pm;Q^pCuLUQUrnQ(hPM10pSSHQS=Bf8DqG1&!-B!oQQ|FuzLruL1w(+g<8&znyI?
zzX-}?SwUvNjEuT?7uUOy{Fb@xKklpj+jdYM^IK9}NxvLRZd{l9FHEQJ4IO~q%4I0O
zAN|*8x^nIU4Giw?f*tmNx=7H)2-Zn?J^B6SgpcW3ZXV_57Sn%Mtfr_=w|sYpAhdJT
zcKo6Z*oIOU(az~3$LOEWm9Q)dYWMA}T7L23MVGqrcA%4H)+^`+=j+Hh8CTCnnG2Rh
zgcXVW%F8$R9)6}f=NQiLPt8qt3xNUQI>Q*)H1lzk<&n?XR-f}tc&9V0H0lhGqHJ^N
zN%h(9-Of2_)!Xk{qdIkU>1%mk%I_Id1!MU*yq&&>)Q+!L^t&-2mW9Xq7g9C@*
zl&PKJ&su2L+iku?Te?Pf?k3tUK){Bj_gb&aPo8Ago^XI~mRTd(5{&^tf1)!-lSMha
z@$~ae!r(~`=p&|mMxy2EiZQ6FvXb(1avS*`Pj%$)*?vwceGKHmHnl`v&fEQ_Wh+G)
zEPQ^3&oV%}%;zF`AM|S%d>pM@1}33PN5*4SewROk_K$n^i8QjaYiRzwG8#OvVIF|{x85wH+?*P*%)woI
zR538k@=(E`V;p1UwA|fqSh`$n_t;Sz4T)`_s~pRR4lbmWWSdxa-FqLZ%fLT)Bh?iye?COx~mO1wkn5)HNMg7`8~
z25VJhz&3Z7`M>6luJrEw$Jikft+6SxyIh?)PU1?DfrKMGC
z=3T;;omE4H`PWqF8?0*dOA3o9y@~WK`S}{?tIHquEw?v`M^D%Lobpdrp%3}1=-&qk
zqAtb1px-1Fy6}E8IUg4s%8B0~P<P5C;de%@n~XnDKF@fr$a+^@$^P|>vlw($aSK2lRtLt~8tRb`I0
znfI!G?K|<5ry*gk>y56rZy0NkK6)))6Mg1=K?7yS9p+#1Ij=W*%5Rt-mlc;#MOnE9
zoi`-+6oj@)`gq2Af!B+9%J#K9V=ji2dj2<_qaLSXOCeqQ&<0zMSb$5mAi;HU=v`v<>NYk}MbD!ewYVB+N-ctzn=l&bTwv)*7
zmY<+Y@SBbtl9PPk$HTR?ln@(T92XjTRj0Mx|Mzl;lW>Su_y^~fh?8(L?oz8h!cCpb
zZG-OY=NJ3{>r*`U<(J%#zjFT-a9>u6+23H{=d(utkgqt7@^)C;pkb)fQ|Q=*8*SyT
z;otKe+f8fEp)ZacKZDn3TNzs>_Kx+g*c_mr8LBhr8GnoEmAQk#%sR52`bdbW8Ms$!0u2bdt=T-lK3JbDW`F(Urt%Ob2seiN>7U`YN}aOdIiCC;eeufJC#m3S
z9#|l2c?G@t*hH5y^76jkv)rs4H+;oiTuY5FQwRMN_7NUqeiD|b&RyxPXQz|3qC(_>
zZJMwjC4F!1m2INXqzisQ4X^w=>&(+Ecdu&~IWEMn7f*YcYI&eWI(6hI#f114%aymM
zyhlG6{q>XN7(LyGiMAS&qijR%d2rV|>AUT_sE&EKUSTCM26>aKzNxk0?K|utOcxl#
zxIOwM#O!!H+QzbX*&p=QuKe4y;bS>&StQOE5AEGg_ubk8{;1yOVAJfE_Js-lL7rr9
z)CEuFIlkApj~uV^zJK7KocjT=4B
zJP(}0x}|A7C$$5gIp>KBPZ|A#2Ew;$#g9Fk)r;Q~?G$>x<+JM)J3u>j
zi68K=I;ld`JJ?Nq+^_B?C+Q%+x#m{9JF$tbaDeNIep%=^#>KHGtg=L)>m
z_J&vaZTs2{qP!4Gdw5u5Kcf}5R4(q}Lebx%(J$7l*Q`Il#pCTM%!`y5y*-~zIVs}D
z9;t+(xmV~R65^ZQXe+<5{$QW0O8MT~a{kdFLR)nfRMA9L(YU>x*DTltN#m-2km
zC;T`cfb{c`mcx(z7o_a8bYJn8_^dz4Cq!DZ37{P6uF{@#519UWK1{>(9sZB1I^6MmNc39MJ-_|)!S8vO+O3&$MulU3Gc
z_W{N*B(yneyl-oN_MKaJ{CZ6dv-~^8uPbLSh&0jfV@EfA{2Dc!_rOyfx`R0T@LonA
z<*%O?-aa_Wm-z$s@K(ex7UhM0-?9C=PkYdk&d2n((E4>&(f4D`fOQY%CURMMyJyU`
zVeJBAId&StHjw76tnwSqZs3e0683`L{a3k9JYdg#(ZVw4J`&CkV-2LFaDE1Z?CehVy%vZx$tM3tTax8E@2;N^QTrPcI?Ob8uK!DM0_sfE6ks2M?iw
zPS4{(k-PF*-oY>S!d9;L+|xdTtLen9B2LvpL4k;#ScB<
z$NP_7j~7)5eXuoYEk*dK_rSz9yT_C4B{r~^#^o}-VQI=Y?01|$aa!a7=UEm$|DsQQ
zfLK1qmho2@)nwA?$1%T6jwO2HZ({6&;`s|OQOxI4S8*Hw=Qp!b(gNJR%SAj&wGa>^&2@x)Vj
zhd^WfzJ^b0O{E^q82Pw({uT`E`MT2WnZ02{E%t*yRPN>?W>0vU^4@Vyh4;mLj918c
z*s*papo?<}cQM{5lcgZScx}?usg{mS!KkH9U%@|^_33?{FI{1ss+8kXyFY&5M-e~f
zM$){FF;_+z3sNJ)Er~{Beux$fEl{R4|7WKcpEsGtK57f+H0DJ$hI;U;JtF>+lG@sV
zQI_;bQ^7XIJ>Bs?C32b1v;am;P4GUqAJ#zOHv}4SmV|xXX6~O9&e_~YCCpbT>s$`!
k<4FtN!5 impl IntoView {
+ // Provides context that manages stylesheets, titles, meta tags, etc.
+ provide_meta_context();
+
+ view! {
+
+
+ // injects a stylesheet into the document
+ // id=leptos means cargo-leptos will hot-reload this stylesheet
+
+
+ // sets the document title
+
+
+ // content for this welcome page
+
+ }
+ .into_view()
+ }>
+
+
+
+
+
+
+ }
+}
+
+/// Renders the home page of your application.
+#[component]
+fn HomePage() -> impl IntoView {
+ let hello = Action::::server();
+ view! {
+
+
+
+ {format!("{err:#?}")}
}>
+ {
+ move || hello.value().get().map(|h|match h {
+ Ok(h) => h.into_view(),
+ err => err.into_view()
+ })
+ }
+
+
+
+ }
+}
+
+#[cfg_attr(feature="ssr",derive(utoipa::ToSchema))]
+#[derive(Debug,Copy,Clone,serde::Serialize,serde::Deserialize)]
+pub struct SayHello {
+ say:bool,
+}
+
+// the following function comment is what our GPT will get
+/// Call to say hello world, or call to not say hello world.
+#[cfg_attr(feature="ssr",utoipa::path(
+ post,
+ path = "/api/hello_world",
+ responses(
+ (status = 200, description = "Hello world from server or maybe not?", body = String),
+ ),
+ params(
+ ("say_whut" = SayHello, description = "If true then say hello, if false then don't."),
+ )
+))]
+#[server(
+ // we need to encode our server functions as JSON because that's what OpenAI generates
+ input=server_fn::codec::Json,
+ endpoint="hello_world"
+)]
+pub async fn hello_world(say_whut:SayHello) -> Result {
+ if say_whut.say {
+ Ok("hello world".to_string())
+ } else {
+ Ok("not hello".to_string())
+ }
+}
+
+
+/// Takes a list of names
+#[cfg_attr(feature="ssr",utoipa::path(
+ post,
+ path = "/api/name_list",
+ responses(
+ (status = 200, description = "The same list you got back", body = String),
+ ),
+ params(
+ ("list" = Vec, description = "A list of names"),
+ )
+))]
+#[server(
+ input=server_fn::codec::Json,
+ endpoint="name_list"
+)]
+pub async fn name_list(list:Vec) -> Result,ServerFnError> {
+ Ok(list)
+}
+
+
+
+#[derive(Clone,Debug,PartialEq,serde::Serialize,serde::Deserialize)]
+pub struct AiServerCall{
+ pub path:String,
+ pub args:String,
+}
+
+
+// Don't include our AI function in the OpenAPI
+#[server]
+pub async fn ai_msg(msg:String) -> Result {
+ crate::open_ai::call_gpt_with_api(msg).await.get(0).cloned().ok_or(ServerFnError::new("No first message"))
+}
+
+#[component]
+pub fn AiSayHello() -> impl IntoView {
+ let ai_msg = Action::::server();
+ let result = create_rw_signal(Vec::new());
+ view!{
+
+
+
+
+
+ }
+}
\ No newline at end of file
diff --git a/projects/openapi-openai-api-swagger-ui/src/error_template.rs b/projects/openapi-openai-api-swagger-ui/src/error_template.rs
new file mode 100644
index 000000000..1e0508da5
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/src/error_template.rs
@@ -0,0 +1,72 @@
+use http::status::StatusCode;
+use leptos::*;
+use thiserror::Error;
+
+#[derive(Clone, Debug, Error)]
+pub enum AppError {
+ #[error("Not Found")]
+ NotFound,
+}
+
+impl AppError {
+ pub fn status_code(&self) -> StatusCode {
+ match self {
+ AppError::NotFound => StatusCode::NOT_FOUND,
+ }
+ }
+}
+
+// A basic function to display errors served by the error boundaries.
+// Feel free to do more complicated things here than just displaying the error.
+#[component]
+pub fn ErrorTemplate(
+ #[prop(optional)] outside_errors: Option,
+ #[prop(optional)] errors: Option>,
+) -> impl IntoView {
+ let errors = match outside_errors {
+ Some(e) => create_rw_signal(e),
+ None => match errors {
+ Some(e) => e,
+ None => panic!("No Errors found and we expected errors!"),
+ },
+ };
+ // Get Errors from Signal
+ let errors = errors.get_untracked();
+
+ // Downcast lets us take a type that implements `std::error::Error`
+ let errors: Vec = errors
+ .into_iter()
+ .filter_map(|(_k, v)| v.downcast_ref::().cloned())
+ .collect();
+ println!("Errors: {errors:#?}");
+
+ // Only the response code for the first error is actually sent from the server
+ // this may be customized by the specific application
+ #[cfg(feature = "ssr")]
+ {
+ use leptos_axum::ResponseOptions;
+ let response = use_context::();
+ if let Some(response) = response {
+ response.set_status(errors[0].status_code());
+ }
+ }
+
+ view! {
+
{if errors.len() > 1 {"Errors"} else {"Error"}}
+ {error_code.to_string()}
+
"Error: " {error_string}
+ }
+ }
+ />
+ }
+}
diff --git a/projects/openapi-openai-api-swagger-ui/src/fileserv.rs b/projects/openapi-openai-api-swagger-ui/src/fileserv.rs
new file mode 100644
index 000000000..e8435765c
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/src/fileserv.rs
@@ -0,0 +1,42 @@
+use axum::{
+ body::Body,
+ extract::State,
+ response::IntoResponse,
+ http::{Request, Response, StatusCode, Uri},
+};
+use axum::response::Response as AxumResponse;
+use tower::ServiceExt;
+use tower_http::services::ServeDir;
+use leptos::*;
+use crate::app::App;
+
+pub async fn file_and_error_handler(uri: Uri, State(options): State, req: Request) -> AxumResponse {
+ let root = options.site_root.clone();
+ let res = get_static_file(uri.clone(), &root).await.unwrap();
+
+ if res.status() == StatusCode::OK {
+ res.into_response()
+ } else {
+ let handler = leptos_axum::render_app_to_stream(options.to_owned(), App);
+ handler(req).await.into_response()
+ }
+}
+
+async fn get_static_file(
+ uri: Uri,
+ root: &str,
+) -> Result, (StatusCode, String)> {
+ let req = Request::builder()
+ .uri(uri.clone())
+ .body(Body::empty())
+ .unwrap();
+ // `ServeDir` implements `tower::Service` so we can call it with `tower::ServiceExt::oneshot`
+ // This path is relative to the cargo root
+ match ServeDir::new(root).oneshot(req).await {
+ Ok(res) => Ok(res.into_response()),
+ Err(err) => Err((
+ StatusCode::INTERNAL_SERVER_ERROR,
+ format!("Something went wrong: {err}"),
+ )),
+ }
+}
diff --git a/projects/openapi-openai-api-swagger-ui/src/lib.rs b/projects/openapi-openai-api-swagger-ui/src/lib.rs
new file mode 100644
index 000000000..4ec8678ad
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/src/lib.rs
@@ -0,0 +1,27 @@
+pub mod app;
+pub mod error_template;
+#[cfg(feature = "ssr")]
+pub mod fileserv;
+#[cfg(feature="ssr")]
+pub mod open_ai;
+#[cfg(feature = "hydrate")]
+#[wasm_bindgen::prelude::wasm_bindgen]
+pub fn hydrate() {
+ use crate::app::*;
+ console_error_panic_hook::set_once();
+ leptos::mount_to_body(App);
+}
+
+
+#[cfg(feature="ssr")]
+pub mod api_doc {
+ use crate::app::__path_hello_world;
+ use crate::app::SayHello;
+ use crate::app::__path_name_list;
+ #[derive(utoipa::OpenApi)]
+ #[openapi(
+ info(description = "My Api description"),
+ paths(hello_world,name_list), components(schemas(SayHello)),
+ )]
+ pub struct ApiDoc;
+}
\ No newline at end of file
diff --git a/projects/openapi-openai-api-swagger-ui/src/main.rs b/projects/openapi-openai-api-swagger-ui/src/main.rs
new file mode 100644
index 000000000..677938965
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/src/main.rs
@@ -0,0 +1,42 @@
+#[cfg(feature = "ssr")]
+#[tokio::main]
+async fn main() {
+ use axum::Router;
+ use leptos::*;
+ use leptos_axum::{generate_route_list, LeptosRoutes};
+ use openapi_swagger_ui::app::*;
+ use openapi_swagger_ui::api_doc::ApiDoc;
+ use openapi_swagger_ui::fileserv::file_and_error_handler;
+ use utoipa::OpenApi;
+
+ // Setting get_configuration(None) means we'll be using cargo-leptos's env values
+ // For deployment these variables are:
+ //
+ // Alternately a file can be specified such as Some("Cargo.toml")
+ // The file would need to be included with the executable when moved to deployment
+ let conf = get_configuration(None).await.unwrap();
+ let leptos_options = conf.leptos_options;
+ let addr = leptos_options.site_addr;
+ let routes = generate_route_list(App);
+
+ // build our application with a route
+ let app = Router::new()
+ .leptos_routes(&leptos_options, routes, App)
+ .fallback(file_and_error_handler)
+ .merge(utoipa_swagger_ui::SwaggerUi::new("/swagger-ui")
+ .url("/api-docs/openapi.json", ApiDoc::openapi()))
+ .with_state(leptos_options);
+
+ let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
+ logging::log!("listening on http://{}", &addr);
+ axum::serve(listener, app.into_make_service())
+ .await
+ .unwrap();
+}
+
+#[cfg(not(feature = "ssr"))]
+pub fn main() {
+ // no client-side main function
+ // unless we want this to work with e.g., Trunk for a purely client-side app
+ // see lib.rs for hydration function instead
+}
diff --git a/projects/openapi-openai-api-swagger-ui/src/open_ai.rs b/projects/openapi-openai-api-swagger-ui/src/open_ai.rs
new file mode 100644
index 000000000..022fbd1a0
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/src/open_ai.rs
@@ -0,0 +1,267 @@
+/*
+Follows
+https://cookbook.openai.com/examples/function_calling_with_an_openapi_spec
+closely
+*/
+
+pub static SYSTEM_MESSAGE: &str = "
+You are a helpful assistant.
+Respond to the following prompt by using function_call and then summarize actions.
+Ask for clarification if a user request is ambiguous.
+";
+use serde_json::Map;
+use openai_dive::v1::api::Client;
+use openai_dive::v1::models::Gpt4Engine;
+use std::env;
+use openai_dive::v1::resources::chat::{
+ ChatCompletionFunction, ChatCompletionParameters, ChatCompletionTool, ChatCompletionToolType, ChatMessage,
+ ChatMessageContent,Role,
+};
+use utoipa::openapi::schema::Array;
+use serde_json::Value;
+use utoipa::openapi::schema::SchemaType;
+use utoipa::openapi::schema::Schema;
+use utoipa::OpenApi;
+use serde_json::json;
+use utoipa::openapi::path::{PathItemType,Parameter};
+use utoipa::openapi::Required;
+use utoipa::openapi::schema::Object;
+use utoipa::openapi::RefOr;
+pub fn make_openapi_call_via_gpt(message:String) -> ChatCompletionParameters {
+    let docs = super::api_doc::ApiDoc::openapi();
+    let mut functions = vec![];
+    // get each path and its path item object
+    for (path,path_item) in docs.paths.paths.iter(){
+        // all our server functions are post.
+        let operation = path_item.operations.get(&PathItemType::Post).expect("Expect POST op");
+        // This name will be given to the OpenAI API as part of our functions
+        let name = operation.operation_id.clone().expect("Each operation to have an operation id");
+
+        // we'll use the description
+        let desc = operation.description.clone().expect("Each operation to have a description, this is how GPT knows what the function does and it is helpful for calling it.");
+        let mut required_list = vec![];
+        let mut properties = serde_json::Map::new();
+        if let Some(params) = operation.parameters.clone() {
+            leptos::logging::log!("{params:#?}");
+            for Parameter{name,description,required,schema,..} in params.into_iter() {
+                if required == Required::True {
+                    required_list.push(name.clone());
+                }
+                let description = description.unwrap_or_default();
+                if let Some(RefOr::Ref(utoipa::openapi::schema::Ref{ref_location,..})) = schema { // $ref: resolve against component schemas
+                    let schema_name = ref_location.split('/').last().expect("Expecting last after split");
+                    let RefOr::T(schema) = docs.components
+                        .as_ref()
+                        .expect("components")
+                        .schemas
+                        .get(schema_name)
+                        .cloned()
+                        .unwrap_or_else(|| panic!("{schema_name} to be in components as a schema")) else {panic!("expecting T")};
+                    let mut output = Map::new();
+                    parse_schema_into_openapi_property(name.clone(),schema,&mut output);
+                    properties.insert(name,serde_json::Value::Object(output));
+                } else if let Some(RefOr::T(schema)) = schema { // inline schema: convert it directly
+                    let mut output = Map::new();
+                    parse_schema_into_openapi_property(name.clone(),schema,&mut output);
+                    properties.insert(name.clone(),serde_json::Value::Object(output));
+                }
+
+            }
+        }
+        let parameters = json!({
+            "type": "object",
+            "properties": properties,
+            "required": required_list,
+        });
+        leptos::logging::log!("{parameters}");
+
+        functions.push(
+            ChatCompletionFunction {
+                name,
+                description: Some(desc),
+                parameters,
+            }
+        )
+    }
+
+    ChatCompletionParameters {
+        model: Gpt4Engine::Gpt41106Preview.to_string(),
+        messages: vec![
+            ChatMessage {
+                role:Role::System,
+                content: ChatMessageContent::Text(SYSTEM_MESSAGE.to_string()),
+                ..Default::default()
+            },
+            ChatMessage {
+                role:Role::User,
+                content: ChatMessageContent::Text(message),
+                ..Default::default()
+            }],
+        tools: Some(functions.into_iter().map(|function|{
+            ChatCompletionTool {
+                r#type: ChatCompletionToolType::Function,
+                function,
+            }
+        }).collect::<Vec<ChatCompletionTool>>()),
+        ..Default::default()
+    }
+}
+
+
+pub fn parse_schema_into_openapi_property(
+    name:String,
+    schema:Schema,
+    output: &mut serde_json::Map<String, Value>) {
+    // Translate a utoipa Schema into an OpenAI function-call "property" JSON object.
+    let docs = super::api_doc::ApiDoc::openapi();
+    match schema {
+        Schema::Object(Object{
+            schema_type,
+            required,
+            properties,
+            ..
+        }) => match schema_type{
+            SchemaType::Object => {
+                output.insert("type".to_string(),Value::String("object".to_string()));
+                output.insert("required".to_string(),Value::Array(required.into_iter()
+                    .map(Value::String)
+                    .collect::<Vec<Value>>()));
+                output.insert("properties".to_string(),{
+                    let mut map = Map::new();
+                    for (key,val) in properties
+                        .into_iter()
+                        .map(|(key,val)|{
+                            let RefOr::T(schema) = val else {panic!("expecting t")};
+                            let mut output = Map::new();
+                            parse_schema_into_openapi_property(name.clone(),schema,&mut output);
+                            (key,output)
+                        }) {
+                        map.insert(key,Value::Object(val));
+                    }
+                    Value::Object(map)
+                });
+
+            },
+            SchemaType::Value => {
+                panic!("not expecting Value here.");
+
+            },
+            SchemaType::String => {
+                output.insert("type".to_string(),serde_json::Value::String("string".to_string()));
+
+            },
+            SchemaType::Integer => {
+                output.insert("type".to_string(),serde_json::Value::String("integer".to_string()));
+
+            },
+            SchemaType::Number => {
+                output.insert("type".to_string(),serde_json::Value::String("number".to_string()));
+
+            },
+            SchemaType::Boolean => {
+                output.insert("type".to_string(),serde_json::Value::String("boolean".to_string()));
+
+            },
+            SchemaType::Array => {
+                output.insert("type".to_string(),serde_json::Value::String("array".to_string()));
+
+            },
+
+        },
+        Schema::Array(Array{schema_type,items,..}) => {
+            match schema_type {
+                SchemaType::Array => {
+                    // each branch below builds its own property map for the item schema
+                    if let RefOr::Ref(utoipa::openapi::schema::Ref{ref_location,..}) = *items {
+                        let schema_name = ref_location.split('/').last().expect("Expecting last after split");
+                        let RefOr::T(schema) = docs.components
+                            .as_ref()
+                            .expect("components")
+                            .schemas
+                            .get(schema_name)
+                            .cloned()
+                            .unwrap_or_else(|| panic!("{schema_name} to be in components as a schema")) else {panic!("expecting T")};
+                        let mut map = Map::new();
+                        parse_schema_into_openapi_property(name.clone(),schema,&mut map);
+                        output.insert(name.clone(),serde_json::Value::Object(map));
+                    } else if let RefOr::T(schema) = *items {
+                        let mut map = Map::new();
+                        parse_schema_into_openapi_property(name.clone(),schema,&mut map);
+                        output.insert(name,serde_json::Value::Object(map));
+                    }
+                },
+                _ => panic!("if schema is an array, then I'm expecting schema type to be an array ")
+            }
+        }
+        _ => panic!("I don't know how to handle this yet.")
+    }
+
+}
+// Calls GPT-4 with the OpenAPI-derived function list and returns the tool calls it requests.
+use crate::app::AiServerCall;
+pub async fn call_gpt_with_api(message:String) -> Vec<AiServerCall> {
+    let api_key = std::env::var("OPENAI_API_KEY").expect("$OPENAI_API_KEY is not set");
+
+    let client = Client::new(api_key);
+
+    let completion_parameters = make_openapi_call_via_gpt(message);
+
+    let result = client.chat().create(completion_parameters).await.unwrap();
+    let message = result.choices[0].message.clone();
+    let mut res = vec![];
+    if let Some(tool_calls) = message.tool_calls {
+        for tool_call in tool_calls {
+            let name = tool_call.function.name;
+            let arguments = tool_call.function.arguments;
+            res.push(AiServerCall{
+                path:name,
+                args:arguments,
+            });
+        }
+    }
+    res
+}
+
+/*
+def openapi_to_functions(openapi_spec):
+ functions = []
+
+ for path, methods in openapi_spec["paths"].items():
+ for method, spec_with_ref in methods.items():
+ # 1. Resolve JSON references.
+ spec = jsonref.replace_refs(spec_with_ref)
+
+ # 2. Extract a name for the functions.
+ function_name = spec.get("operationId")
+
+ # 3. Extract a description and parameters.
+ desc = spec.get("description") or spec.get("summary", "")
+
+ schema = {"type": "object", "properties": {}}
+
+ req_body = (
+ spec.get("requestBody", {})
+ .get("content", {})
+ .get("application/json", {})
+ .get("schema")
+ )
+ if req_body:
+ schema["properties"]["requestBody"] = req_body
+
+ params = spec.get("parameters", [])
+ if params:
+ param_properties = {
+ param["name"]: param["schema"]
+ for param in params
+ if "schema" in param
+ }
+ schema["properties"]["parameters"] = {
+ "type": "object",
+ "properties": param_properties,
+ }
+
+ functions.append(
+ {"type": "function", "function": {"name": function_name, "description": desc, "parameters": schema}}
+ )
+
+ return functions */
\ No newline at end of file
diff --git a/projects/openapi-openai-api-swagger-ui/style/main.scss b/projects/openapi-openai-api-swagger-ui/style/main.scss
new file mode 100644
index 000000000..e4538e156
--- /dev/null
+++ b/projects/openapi-openai-api-swagger-ui/style/main.scss
@@ -0,0 +1,4 @@
+body {
+    font-family: sans-serif; /* plain system sans-serif; no webfont needed */
+    text-align: center; /* center the demo's content */
+}
\ No newline at end of file