diff --git a/.github/release-drafter.yaml b/.github/release-drafter.yaml new file mode 100644 index 0000000..b7b6893 --- /dev/null +++ b/.github/release-drafter.yaml @@ -0,0 +1,7 @@ +categories: + - title: 'Dependency updates' + labels: + - 'dependency' +template: | + ## What’s Changed + $CHANGES \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..283e89e --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,72 @@ +name: CI + +on: + push: + branches: [ '**' ] + tags: [ '**' ] + pull_request: + branches: [ '**' ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: coursier/cache-action@v5 + - uses: coursier/setup-action@v1.1.2 + with: + jvm: adopt:11 + - name: Check format + run: ./millw __.checkFormat + - name: Compile all + run: ./millw __.compile + - name: Scalafix check + run: ./millw __.fix --check + - name: Run tests + run: ./millw __.test + + - name: Setup GPG secrets for publish + if: github.event_name != 'pull_request' && github.event_name == 'push' && ((startsWith(github.ref, 'refs/tags/')) || github.ref == 'refs/heads/main') + run: | + gpg --version + cat <(echo "${{ secrets.GPG_SECRET_KEY }}") | base64 --decode | gpg --batch --import + + - name: Publish to Nexus Repository + if: github.event_name != 'pull_request' && github.event_name == 'push' && ((startsWith(github.ref, 'refs/tags/')) || github.ref == 'refs/heads/main') + run: | + ./millw mill.scalalib.PublishModule/publishAll \ + __.publishArtifacts \ + --sonatypeCreds '${{ secrets.SONATYPE_CREDS }}' \ + --stagingRelease false \ + --sonatypeUri "https://nexus.iog.solutions/repository/maven-release/" \ + --sonatypeSnapshotUri "https://nexus.iog.solutions/repository/maven-snapshot/" \ + --gpgArgs --batch,--yes,-a,-b + + publish-notes: + name: Publish release notes + if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/')) + needs: [ build ] + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Extract version from commit message + run: | + version=${GITHUB_REF#refs/*/} + echo "VERSION=$version" >> $GITHUB_ENV + env: + COMMIT_MSG: ${{ github.event.head_commit.message }} + - name: Publish release notes + uses: release-drafter/release-drafter@v5 + with: + config-name: release-drafter.yaml + publish: true + name: "v${{ env.VERSION }}" + tag: "${{ env.VERSION }}" + version: "v${{ env.VERSION }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/dependency-graph.yaml b/.github/workflows/dependency-graph.yaml new file mode 100644 index 0000000..6bcf06f --- /dev/null +++ b/.github/workflows/dependency-graph.yaml @@ -0,0 +1,17 @@ +name: github-dependency-graph + +on: + push: + branches: + - main + +jobs: + submit-dependency-graph: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: coursier/cache-action@v6 + - uses: coursier/setup-action@v1.1.2 + with: + jvm: adopt:11 + - uses: ckipp01/mill-dependency-submission@v1 \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..adeb9e8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +out +.idea +.vscode +.metals +*.iml + +mill-bsp.json + +project +target \ No newline at end of file diff --git a/.mill-version b/.mill-version new file mode 100644 index 0000000..9028ec6 --- /dev/null +++ b/.mill-version @@ -0,0 +1 @@ +0.10.5 
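The CI workflow above drives every check through the bundled mill wrapper, so the same targets can be reproduced locally before pushing. A minimal sketch (assuming a Unix shell and the repository root as the working directory; the commands are the ones invoked in `ci.yaml`):

```sh
# Run the same targets the CI workflow invokes, via the mill wrapper.
./millw __.checkFormat   # scalafmt formatting check
./millw __.compile       # compile every module
./millw __.fix --check   # scalafix lint, without applying rewrites
./millw __.test          # run all test modules
```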
diff --git a/.scalafix.conf b/.scalafix.conf new file mode 100644 index 0000000..b9bbc61 --- /dev/null +++ b/.scalafix.conf @@ -0,0 +1,34 @@ +rules = [ + ExplicitResultTypes + NoAutoTupling + NoValInForComprehension + OrganizeImports + ProcedureSyntax + RemoveUnused + DisableSyntax + LeakingImplicitClassVal +] + +OrganizeImports { + # Align with IntelliJ IDEA so that they don't fight each other + groupedImports = Merge +} + +RemoveUnused { + imports = false // handled by OrganizeImports + // removing unused private variables can break compilation if the variable is written + // to but never read). It also often is a symptom of a bug so a manual intervention is preferable + privates = false + locals = false +} + +DisableSyntax.noVars = true +DisableSyntax.noThrows = false +DisableSyntax.noNulls = true +DisableSyntax.noReturns = true +DisableSyntax.noAsInstanceOf = false +DisableSyntax.noIsInstanceOf = true +DisableSyntax.noXml = true +DisableSyntax.noFinalVal = true +DisableSyntax.noFinalize = true +DisableSyntax.noValPatterns = true \ No newline at end of file diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 0000000..956f6dc --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,3 @@ +version = 3.5.3 +maxColumn = 140 +runner.dialect = scala213 \ No newline at end of file diff --git a/CODE-OF-CONDUCT.md b/CODE-OF-CONDUCT.md new file mode 100644 index 0000000..9a5a01c --- /dev/null +++ b/CODE-OF-CONDUCT.md @@ -0,0 +1,73 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +education, socio-economic status, nationality, personal appearance, race, +religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at {{ email }}. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at + +[homepage]: https://www.contributor-covenant.org diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..651b0b9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,118 @@
+# armadillo
+
+[![CI](https://github.com/input-output-hk/armadillo/workflows/CI/badge.svg)](https://github.com/input-output-hk/armadillo/actions?query=workflow%3A%22CI%22)
+
+Armadillo allows you to easily represent your [json-rpc](https://www.jsonrpc.org/) endpoints as regular Scala values.
+These endpoints can later be turned into an HTTP server via [tapir](https://github.com/softwaremill/tapir) or into
+always up-to-date [openRpc](https://open-rpc.org/getting-started) documentation.
+
+## Why another library
+
+We created armadillo because we wanted to have always up-to-date, automatically generated documentation for our API.
+We looked into tapir as we liked the idea of representing endpoints as pure values, but since it is built around the HTTP protocol it lacked
+the ability to represent json-rpc routing, which from the HTTP perspective is a single dynamic route (the routing is based on a part of the json payload).
+See https://github.com/softwaremill/tapir/issues/621 for details.
+
+## Quick demo
+
+```scala
+implicit val rpcBlockResponseEncoder: Encoder[GreetingResponse] = deriveEncoder
+implicit val rpcBlockResponseDecoder: Decoder[GreetingResponse] = deriveDecoder
+implicit val rpcBlockResponseSchema: Schema[GreetingResponse] = Schema.derived
+
+case class GreetingResponse(msg: String)
+
+val helloEndpoint: JsonRpcServerEndpoint[IO] = jsonRpcEndpoint(m"say_hello")
+  .in(param[String]("name"))
+  .out[GreetingResponse]("greetings")
+  .serverLogic[IO] { name =>
+    IO(Right(GreetingResponse(s"Hello $name")))
+  }
+
+val tapirInterpreter = new TapirInterpreter[IO, Json](new CirceJsonSupport)
+val tapirEndpoint = tapirInterpreter.toTapirEndpointUnsafe(List(helloEndpoint))
+val routes = Http4sServerInterpreter[IO](Http4sServerOptions.default[IO]).toRoutes(tapirEndpoint)
+
+BlazeServerBuilder[IO]
+  .withExecutionContext(ec)
+  .bindHttp(8080, "localhost")
+  .withHttpApp(Router("/" -> routes).orNotFound)
+  .resource
+  .flatMap { _ =>
+    AsyncHttpClientCatsBackend.resource[IO]()
+  }
+  .use { client =>
+    val request = json"""{"jsonrpc": "2.0", "method": "say_hello", "params": ["kasper"], "id": 1}"""
+    SttpClientInterpreter()
+      .toClient(tapirEndpoint.endpoint, Some(Uri.apply("localhost", 8080)), client)
+      .apply(request.noSpaces)
+      .map { response =>
+        println(s"Response: $response")
+      }
+  }
+  .unsafeRunSync()
+```
+
+## How it works
+
+1. Describe your json-rpc endpoints using armadillo's building blocks
+2. Attach server logic to the created endpoint descriptions
+3. Convert the armadillo endpoints to a single tapir endpoint and expose it via one of the available HTTP servers
+4. Bonus: automatically generate openRpc documentation and expose it under the rpc.discover endpoint
+
+Head over to the [examples](./example) to see armadillo in action!
+
+## Quickstart with sbt
+Add the following dependency:
+
+```
+"io.iohk.armadillo" %% "armadillo-core" % "0.0.10"
+```
+
+## Quickstart with mill
+Add the following dependency:
+
+```
+ivy"io.iohk.armadillo::armadillo-core::0.0.10"
+```
+
+
+## Modules description
+
+- core - the pure definition of armadillo
+- json
+  - circe - support for the circe library
+  - json4s - support for the json4s library
+- server
+  - tapir - a server interpreter from armadillo => tapir
+  - fs2 - a server interpreter from armadillo => fs2.pipe
+- example - a module which pulls everything together to show the power of armadillo
+- openrpc - an interpreter from armadillo to openRpc
+  - model - openrpc structures
+  - circe - circe codecs for openrpc structures
+  - circeYaml - extension methods to convert an openrpc doc into a yaml file
+- trace4cats - support for the trace4cats tracing library
+
+## Developer notes
+
+Armadillo uses [mill](https://com-lihaoyi.github.io/mill/mill/Intro_to_Mill.html) as its build tool.
+
+To import the project into IntelliJ IDEA, call `./millw mill.scalalib.GenIdea/idea`.
+
+If you would like to use BSP instead, call `./millw mill.bsp.BSP/install`.
+
+Releases are fully automated using GitHub Actions: simply push a new tag to create a new version.
+Note that mill will try to use the tag name directly as the maven artifact version.
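A sketch of that release flow (the version number is purely illustrative, and plain version tags without a `v` prefix are assumed, as the workflow's tag handling suggests):

```sh
# Cutting a release: the pushed tag is picked up by the CI publish job,
# and the tag name ends up as the maven artifact version.
git tag 1.2.3            # hypothetical version, not an actual release
git push origin 1.2.3
```

This lines up with `VcsVersion.vcsState().format()` in `build.sc`, which derives `publishVersion` from the current git state.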
+ +## Testing + +Weaver exposes a JUnit runner, so tests can be run from Intellij, provided you have JUnit plugin enabled. + +To run only selected tests, weaver allows you to tag them with: `test("test name".only)`. + +## Credits + +This library is inspired by another great library - [tapir](https://github.com/softwaremill/tapir). + +Also, big thanks to [Adam Warski](https://github.com/adamw) for reviewing my initial design and patiently answering all of +my questions about design choices he took in tapir. diff --git a/build.sc b/build.sc new file mode 100644 index 0000000..b8aad69 --- /dev/null +++ b/build.sc @@ -0,0 +1,289 @@ +import $ivy.`com.goyeau::mill-scalafix_mill0.10:0.2.9` +import $ivy.`de.tototec::de.tobiasroeser.mill.vcs.version_mill0.10:0.1.4` +import $ivy.`io.github.davidgregory084::mill-tpolecat_mill0.10:0.3.0` +import com.goyeau.mill.scalafix.ScalafixModule +import de.tobiasroeser.mill.vcs.version.VcsVersion +import io.github.davidgregory084.TpolecatModule +import mill._ +import mill.scalalib._ +import mill.scalalib.bsp.ScalaMetalsSupport +import mill.scalalib.publish.{Developer, License, PomSettings, VersionControl} +import mill.scalalib.scalafmt.ScalafmtModule + +object core extends CommonModule with ArmadilloPublishModule { + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.tapir::tapir-core::${Version.Tapir}" + ) + + object test extends Tests with CommonTestModule +} + +object json extends CommonModule { + object circe extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core, server) + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.tapir::tapir-json-circe:${Version.Tapir}" + ) + + object test extends Tests with CommonTestModule + + } + object json4s extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core, server) + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.tapir::tapir-json-json4s:${Version.Tapir}" + ) + + object test extends Tests with CommonTestModule + } +} + +object openrpc extends CommonModule with ArmadilloPublishModule { + object model extends CommonModule with ArmadilloPublishModule { + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.apispec::apispec-model::${Version.Apispec}", + ivy"com.softwaremill.sttp.apispec::jsonschema-circe::${Version.Apispec}" + ) + } + object circe extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(model) + override def ivyDeps = Agg( + ivy"io.circe::circe-core::${Version.Circe}", + ivy"io.circe::circe-parser::${Version.Circe}", + ivy"io.circe::circe-generic::${Version.Circe}" + ) + } + object circeYaml extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(circe) + + override def ivyDeps = Agg(ivy"io.circe::circe-yaml::${Version.Circe}") + } + + override def moduleDeps = Seq(core, circeYaml) + + object test extends Tests with CommonTestModule { + override def moduleDeps = Seq(openrpc, json.circe) + override def ivyDeps = Agg( + WeaverDep, + ivy"org.typelevel::cats-effect::${Version.CatsEffect}" + ) + } +} + +object server extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core) + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}" + ) + + object tapir extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core, server) + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.tapir::tapir-core::${Version.Tapir}" + ) + + object test extends Tests with 
CommonTestModule { + override def moduleDeps = Seq(core, json.circe, tapir, server.test) + override def ivyDeps = Agg( + WeaverDep, + ivy"com.softwaremill.sttp.tapir::tapir-http4s-server::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-client::${Version.Tapir}", + ivy"com.softwaremill.sttp.client3::async-http-client-backend-cats::${Version.Sttp}", + ivy"org.http4s::http4s-blaze-server::${Version.Http4s}", + ivy"com.softwaremill.sttp.client3::circe::${Version.Sttp}", + ivy"org.typelevel::cats-effect::${Version.CatsEffect}" + ) + } + } + + object fs2 extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core, json.circe, server) + override def ivyDeps = Agg( + ivy"co.fs2::fs2-core::3.2.5", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}" + ) + } + + object test extends Tests with CommonTestModule { + override def moduleDeps = Seq(core, json.circe, json.json4s) + override def ivyDeps = Agg( + WeaverDep, + ivy"io.circe::circe-literal::${Version.Circe}", + ivy"org.json4s::json4s-core::${Version.Json4s}", + ivy"org.json4s::json4s-jackson:${Version.Json4s}", + ivy"org.typelevel::cats-effect::${Version.CatsEffect}" + ) + } + + object stub extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core, server, server.tapir) + override def ivyDeps = Agg( + ivy"com.softwaremill.sttp.client3::core::${Version.Sttp}", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-stub-server::${Version.Tapir}" + ) + object test extends Tests with CommonTestModule { + override def moduleDeps = Seq(json.circe, stub) + override def ivyDeps = Agg( + WeaverDep, + ivy"io.circe::circe-literal::${Version.Circe}", + ivy"org.typelevel::cats-effect::${Version.CatsEffect}", + ivy"com.softwaremill.sttp.client3::cats::${Version.Sttp}", + ivy"com.softwaremill.sttp.client3::circe::${Version.Sttp}" + ) + } + } +} + +object example extends CommonModule { + + object json4sApp extends CommonModule { + override def moduleDeps = Seq(core, server.tapir, json.json4s) + + override def ivyDeps = + Agg( + ivy"org.typelevel::cats-effect::${Version.CatsEffect}", + ivy"org.http4s::http4s-dsl::${Version.Http4s}", + ivy"org.http4s::http4s-circe::${Version.Http4s}", + ivy"org.http4s::http4s-blaze-server::${Version.Http4s}", + ivy"com.softwaremill.sttp.tapir::tapir-http4s-server::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"org.json4s::json4s-core::${Version.Json4s}", + ivy"org.json4s::json4s-jackson:${Version.Json4s}", + ivy"ch.qos.logback:logback-classic:1.2.7", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-client::${Version.Tapir}", + ivy"com.softwaremill.sttp.client3::async-http-client-backend-cats::${Version.Sttp}" + ) + } + object circeApp extends CommonModule { + override def moduleDeps = Seq(core, server.tapir, json.circe) + + override def ivyDeps = + Agg( + ivy"org.typelevel::cats-effect::${Version.CatsEffect}", + ivy"org.http4s::http4s-dsl::${Version.Http4s}", + ivy"org.http4s::http4s-circe::${Version.Http4s}", + ivy"org.http4s::http4s-blaze-server::${Version.Http4s}", + ivy"com.softwaremill.sttp.tapir::tapir-http4s-server::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"ch.qos.logback:logback-classic:1.2.7", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-client::${Version.Tapir}", + ivy"com.softwaremill.sttp.client3::async-http-client-backend-cats::${Version.Sttp}", + 
ivy"io.circe::circe-literal::${Version.Circe}" + ) + } + object json4sAndTrace4cats extends CommonModule { + override def moduleDeps = Seq(core, server.tapir, json.json4s, trace4cats) + + override def ivyDeps = + Agg( + ivy"org.typelevel::cats-effect::${Version.CatsEffect}", + ivy"org.http4s::http4s-dsl::${Version.Http4s}", + ivy"org.http4s::http4s-blaze-server::${Version.Http4s}", + ivy"com.softwaremill.sttp.tapir::tapir-http4s-server::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"org.json4s::json4s-core::${Version.Json4s}", + ivy"org.json4s::json4s-jackson:${Version.Json4s}", + ivy"io.janstenpickle::trace4cats-log-exporter::${Version.Trace4cats}", + ivy"io.janstenpickle::trace4cats-avro-exporter::${Version.Trace4cats}", + ivy"ch.qos.logback:logback-classic:1.2.7", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-client::${Version.Tapir}", + ivy"com.softwaremill.sttp.client3::async-http-client-backend-cats::${Version.Sttp}" + ) + } + object circeFs2 extends CommonModule { + override def moduleDeps = Seq(core, server.fs2, json.circe) + override def ivyDeps = Agg( + ivy"co.fs2::fs2-core::3.2.5", + ivy"co.fs2::fs2-io::3.2.5", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"com.github.jnr:jnr-unixsocket:0.38.8" + ) + } + + object tapirWebsocket extends CommonModule{ + override def moduleDeps = Seq(core, server.fs2, json.circe, server.tapir) + override def ivyDeps = Agg( + ivy"co.fs2::fs2-core::3.2.5", + ivy"org.typelevel::cats-effect::${Version.CatsEffect}", + ivy"org.http4s::http4s-dsl::${Version.Http4s}", + ivy"org.http4s::http4s-circe::${Version.Http4s}", + ivy"org.http4s::http4s-blaze-server::${Version.Http4s}", + ivy"com.softwaremill.sttp.tapir::tapir-http4s-server::${Version.Tapir}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}", + ivy"ch.qos.logback:logback-classic:1.2.7", + ivy"com.softwaremill.sttp.tapir::tapir-sttp-client::${Version.Tapir}", + ivy"com.softwaremill.sttp.client3::async-http-client-backend-fs2::${Version.Sttp}", + ivy"io.circe::circe-literal::${Version.Circe}" + ) + } +} + +object trace4cats extends CommonModule with ArmadilloPublishModule { + override def moduleDeps = Seq(core) + override def ivyDeps = Agg( + ivy"io.janstenpickle::trace4cats-base::${Version.Trace4cats}", + ivy"io.janstenpickle::trace4cats-core::${Version.Trace4cats}", + ivy"io.janstenpickle::trace4cats-inject::${Version.Trace4cats}", + ivy"com.softwaremill.sttp.tapir::tapir-cats::${Version.Tapir}" + ) +} + +trait BaseModule extends ScalaModule with ScalafmtModule with TpolecatModule with ScalafixModule with ScalaMetalsSupport { + override def semanticDbVersion = "4.4.32" + override def scalafixScalaBinaryVersion = "2.13" + override def scalacOptions = T { + super.scalacOptions().filterNot(Set("-Xfatal-warnings", "-Xsource:3")) ++ Seq( + "-Ymacro-annotations", + "-Ywarn-value-discard" + ) + } + override def scalafixIvyDeps = + Agg(ivy"com.github.liancheng::organize-imports:0.6.0") +} + +trait CommonTestModule extends BaseModule with TestModule { + val WeaverDep = ivy"com.disneystreaming::weaver-cats:0.7.11" + + override def ivyDeps = Agg(WeaverDep) + override def testFramework = "weaver.framework.CatsEffect" +} + +trait CommonModule extends BaseModule { + def scalaVersion = "2.13.8" + override def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg( + ivy"org.typelevel:::kind-projector:0.13.2" + ) +} + +trait ArmadilloPublishModule extends PublishModule { + + def publishVersion = VcsVersion.vcsState().format() + + 
def pomSettings = PomSettings( + description = artifactName(), + organization = "io.iohk.armadillo", + url = "https://github.com/input-output-hk/armadillo", + licenses = Seq(License.`Apache-2.0`), + versionControl = VersionControl.github("input-output-hk", "armadillo"), + developers = Seq( + Developer("ghostbuster91", "Kasper Kondzielski", "https://github.com/ghostbuster91"), + Developer("dleflohic", "Damien Le Flohic", "https://github.com/dleflohic"), + Developer("AurelienRichez", "Aurélien Richez", "https://github.com/AurelienRichez"), + Developer("AmbientTea", "Nikolaos Dymitriadis", "https://github.com/AmbientTea") + ) + ) + + override def artifactName: T[String] = s"armadillo-${millModuleSegments.parts.mkString("-")}" +} + +object Version { + val Trace4cats = "0.13.1" + val Tapir = "1.1.2" + val Http4s = "0.23.10" + val Json4s = "4.0.6" + val Circe = "0.14.1" + val Sttp = "3.8.2" + val CatsEffect = "3.3.12" + val Apispec = "0.3.1" +} diff --git a/core/src/io/iohk/armadillo/Armadillo.scala b/core/src/io/iohk/armadillo/Armadillo.scala new file mode 100644 index 0000000..7816429 --- /dev/null +++ b/core/src/io/iohk/armadillo/Armadillo.scala @@ -0,0 +1,83 @@ +package io.iohk.armadillo + +import sttp.tapir.Mapping +import sttp.tapir.typelevel.ErasureSameAsType + +import scala.reflect.ClassTag + +trait Armadillo { + + def jsonRpcEndpoint( + methodName: MethodName, + paramStructure: ParamStructure = ParamStructure.Either + ): JsonRpcEndpoint[Unit, Unit, Unit] = + JsonRpcEndpoint( + methodName = methodName, + paramStructure = paramStructure, + input = JsonRpcInput.emptyInput, + output = JsonRpcOutput.emptyOutput, + error = JsonRpcErrorOutput.emptyOutput, + info = JsonRpcEndpointInfo.Empty + ) + + def param[T: JsonRpcCodec](name: String): JsonRpcInput.Basic[T] = + JsonRpcIO.Single(implicitly[JsonRpcCodec[T]], JsonRpcIoInfo.empty[T], name) + + def result[T: JsonRpcCodec](name: String): JsonRpcOutput.Basic[T] = + JsonRpcIO.Single(implicitly[JsonRpcCodec[T]], JsonRpcIoInfo.empty[T], name) + + def fixedError[T](code: Int, message: String): JsonRpcErrorOutput[T] = + JsonRpcErrorOutput.Fixed[T](code, message) + + def fixedErrorWithData[T](code: Int, message: String)(implicit + codec: JsonRpcCodec[T] + ): JsonRpcErrorOutput[T] = + JsonRpcErrorOutput.FixedWithData[T](code, message, codec) + + def errorNoData: JsonRpcErrorOutput[JsonRpcError.NoData] = + JsonRpcErrorOutput.SingleNoData() + + def errorWithData[T](implicit codec: JsonRpcCodec[JsonRpcError[T]]): JsonRpcErrorOutput[JsonRpcError[T]] = + JsonRpcErrorOutput.SingleWithData(codec) + + def oneOf[T]( + firstVariant: JsonRpcErrorOutput.OneOfVariant[_ <: T], + otherVariants: JsonRpcErrorOutput.OneOfVariant[_ <: T]* + ): JsonRpcErrorOutput.OneOf[T, T] = + JsonRpcErrorOutput.OneOf[T, T](firstVariant +: otherVariants.toList, Mapping.id) + + def oneOfVariant[T: ClassTag: ErasureSameAsType](output: JsonRpcErrorOutput[T]): JsonRpcErrorOutput.OneOfVariant[T] = + oneOfVariantClassMatcher(output, implicitly[ClassTag[T]].runtimeClass) + + /** Create a one-of-variant which uses `output` if the provided value (when interpreting as a server matches the `matcher` predicate. + * + * Should be used in [[oneOf]] output descriptions. 
+ */ + def oneOfVariantValueMatcher[T](output: JsonRpcErrorOutput[T])( + matcher: PartialFunction[Any, Boolean] + ): JsonRpcErrorOutput.OneOfVariant[T] = + JsonRpcErrorOutput.OneOfVariant(output, matcher.lift.andThen(_.getOrElse(false))) + + def oneOfVariantClassMatcher[T]( + output: JsonRpcErrorOutput[T], + runtimeClass: Class[_] + ): JsonRpcErrorOutput.OneOfVariant[T] = { + // when used with a primitive type or Unit, the class tag will correspond to the primitive type, but at runtime + // we'll get boxed values + val rc = primitiveToBoxedClasses.getOrElse(runtimeClass, runtimeClass) + JsonRpcErrorOutput.OneOfVariant(output, { (a: Any) => rc.isInstance(a) }) + } + + private val primitiveToBoxedClasses = Map[Class[_], Class[_]]( + classOf[Byte] -> classOf[java.lang.Byte], + classOf[Short] -> classOf[java.lang.Short], + classOf[Char] -> classOf[java.lang.Character], + classOf[Int] -> classOf[java.lang.Integer], + classOf[Long] -> classOf[java.lang.Long], + classOf[Float] -> classOf[java.lang.Float], + classOf[Double] -> classOf[java.lang.Double], + classOf[Boolean] -> classOf[java.lang.Boolean], + java.lang.Void.TYPE -> classOf[scala.runtime.BoxedUnit] + ) + +} diff --git a/core/src/io/iohk/armadillo/JsonRpc.scala b/core/src/io/iohk/armadillo/JsonRpc.scala new file mode 100644 index 0000000..4fc4845 --- /dev/null +++ b/core/src/io/iohk/armadillo/JsonRpc.scala @@ -0,0 +1,80 @@ +package io.iohk.armadillo + +import sttp.tapir.SchemaType.SchemaWithValue +import sttp.tapir.{Schema, SchemaType} + +case class JsonRpcRequest[Raw](jsonrpc: String, method: String, params: Option[Raw], id: Option[JsonRpcId]) { + def isNotification: Boolean = id.isEmpty +} +object JsonRpcRequest { + implicit def schema[Raw: Schema]: Schema[JsonRpcRequest[Raw]] = Schema.derived[JsonRpcRequest[Raw]] + + def v2[Raw](method: String, params: Raw, id: JsonRpcId): JsonRpcRequest[Raw] = + JsonRpcRequest(JsonRpcVersion_2_0, method, Some(params), Some(id)) + + def v2[Raw](method: String, id: JsonRpcId): JsonRpcRequest[Raw] = + JsonRpcRequest(JsonRpcVersion_2_0, method, None, Some(id)) +} +object Notification { + def v2[Raw](method: String, params: Raw): JsonRpcRequest[Raw] = + JsonRpcRequest(JsonRpcVersion_2_0, method, Some(params), None) + + def v2[Raw](method: String): JsonRpcRequest[Raw] = + JsonRpcRequest(JsonRpcVersion_2_0, method, None, None) +} + +sealed trait JsonRpcId +object JsonRpcId { + case class IntId(value: Int) extends JsonRpcId + case class StringId(value: String) extends JsonRpcId + + implicit def intAsId(v: Int): JsonRpcId.IntId = JsonRpcId.IntId(v) + implicit def stringAsId(v: String): JsonRpcId.StringId = JsonRpcId.StringId(v) + + implicit val schema: Schema[JsonRpcId] = { + val s1 = Schema.schemaForInt + val s2 = Schema.schemaForString + Schema[JsonRpcId]( + SchemaType.SCoproduct(List(s1, s2), None) { + case IntId(v) => Some(SchemaWithValue(s1, v)) + case StringId(v) => Some(SchemaWithValue(s2, v)) + }, + for { + na <- s1.name + nb <- s2.name + } yield Schema.SName("JsonRpcId", List(na.show, nb.show)) + ) + } +} + +sealed trait JsonRpcResponse[Raw] { + def jsonrpc: String +} +object JsonRpcResponse { // TODO change to success and error sub types + def v2[Raw](result: Raw, id: JsonRpcId): JsonRpcSuccessResponse[Raw] = + JsonRpcSuccessResponse[Raw](JsonRpcVersion_2_0, result, id) + def error_v2[Raw](error: Raw): JsonRpcErrorResponse[Raw] = error_v2[Raw](error, None) + def error_v2[Raw](error: Raw, id: JsonRpcId): JsonRpcErrorResponse[Raw] = error_v2[Raw](error, Some(id)) + def error_v2[Raw](error: Raw, id: 
Option[JsonRpcId]): JsonRpcErrorResponse[Raw] = + JsonRpcErrorResponse[Raw](JsonRpcVersion_2_0, error, id) +} + +final case class JsonRpcSuccessResponse[Raw](jsonrpc: String, result: Raw, id: JsonRpcId) extends JsonRpcResponse[Raw] +object JsonRpcSuccessResponse { + implicit def schema[Raw: Schema]: Schema[JsonRpcSuccessResponse[Raw]] = Schema.derived[JsonRpcSuccessResponse[Raw]] +} + +final case class JsonRpcErrorResponse[Raw](jsonrpc: String, error: Raw, id: Option[JsonRpcId]) extends JsonRpcResponse[Raw] +object JsonRpcErrorResponse { + implicit def schema[Raw: Schema]: Schema[JsonRpcErrorResponse[Raw]] = Schema.derived[JsonRpcErrorResponse[Raw]] +} + +final case class JsonRpcError[Data](code: Int, message: String, data: Data) +object JsonRpcError { + type NoData = JsonRpcError[Unit] + + implicit def schema[T: Schema]: Schema[JsonRpcError[T]] = Schema.derived[JsonRpcError[T]] + + def noData(code: Int, msg: String): JsonRpcError[Unit] = JsonRpcError(code, msg, ()) + def withData[T](code: Int, msg: String, data: T): JsonRpcError[T] = JsonRpcError(code, msg, data) +} diff --git a/core/src/io/iohk/armadillo/JsonRpcCodec.scala b/core/src/io/iohk/armadillo/JsonRpcCodec.scala new file mode 100644 index 0000000..193f291 --- /dev/null +++ b/core/src/io/iohk/armadillo/JsonRpcCodec.scala @@ -0,0 +1,27 @@ +package io.iohk.armadillo + +import sttp.tapir.{DecodeResult, Schema, Validator} + +trait JsonRpcCodec[H] { + type L + def decode(l: L): DecodeResult[H] + def encode(h: H): L + def schema: Schema[H] + + def show(l: L): String +} +object JsonRpcCodec { + implicit class JsonRpcCodecOps[H](val codec: JsonRpcCodec[H]) { + def withValidator(validator: Validator[H]): JsonRpcCodec[H] = new JsonRpcCodec[H] { + override type L = codec.L + + override def decode(l: L): DecodeResult[H] = codec.decode(l) + + override def encode(h: H): L = codec.encode(h) + + override def schema: Schema[H] = codec.schema.copy(validator = codec.schema.validator.and(validator)) + + override def show(l: codec.L): String = codec.show(l) + } + } +} diff --git a/core/src/io/iohk/armadillo/JsonRpcEndpointIO.scala b/core/src/io/iohk/armadillo/JsonRpcEndpointIO.scala new file mode 100644 index 0000000..e50e97a --- /dev/null +++ b/core/src/io/iohk/armadillo/JsonRpcEndpointIO.scala @@ -0,0 +1,223 @@ +package io.iohk.armadillo + +import sttp.tapir.typelevel.ParamConcat +import sttp.tapir.{Mapping, Validator} + +case class JsonRpcEndpoint[I, E, O]( + methodName: MethodName, + paramStructure: ParamStructure, + input: JsonRpcInput[I], + output: JsonRpcOutput[O], + error: JsonRpcErrorOutput[E], + info: JsonRpcEndpointInfo +) { + def in[J](i: JsonRpcInput[J]): JsonRpcEndpoint[J, E, O] = + copy(input = i) + + def serverLogic[F[_]](f: I => F[Either[E, O]]): JsonRpcServerEndpoint.Full[I, E, O, F] = { + import sttp.monad.syntax._ + JsonRpcServerEndpoint[I, E, O, F](this, implicit m => i => f(i).map(x => x)) + } + + def out[P](name: String)(implicit jsonRpcCodec: JsonRpcCodec[P]): JsonRpcEndpoint[I, E, P] = + copy(output = result[P](name)) + + def out[P](o: JsonRpcOutput[P]): JsonRpcEndpoint[I, E, P] = copy(output = o) + + def errorOut[F](error: JsonRpcErrorOutput[F]): JsonRpcEndpoint[I, F, O] = + copy(error = error) + + def showDetail: String = + s"JsonRpcEndpoint(method: $methodName, in: ${input.show}, errout: ${error.show}, out: ${output.show})" + + def withInfo(info: JsonRpcEndpointInfo): JsonRpcEndpoint[I, E, O] = copy(info = info) + + def summary(s: String): JsonRpcEndpoint[I, E, O] = withInfo(info.summary(s)) + def description(d: String): 
JsonRpcEndpoint[I, E, O] = withInfo(info.description(d)) + def deprecated(): JsonRpcEndpoint[I, E, O] = withInfo(info.deprecated(true)) + def tag(t: JsonRpcEndpointTag): JsonRpcEndpoint[I, E, O] = withInfo(info.tag(t)) + def tags(ts: List[JsonRpcEndpointTag]): JsonRpcEndpoint[I, E, O] = withInfo(info.tags(ts)) + def externalDocs(ed: JsonRpcEndpointExternalDocs): JsonRpcEndpoint[I, E, O] = withInfo(info.externalDocs(ed)) +} + +object JsonRpcEndpoint { + implicit val ordering: Ordering[JsonRpcEndpoint[_, _, _]] = Ordering.by(_.methodName.asString) +} + +case class JsonRpcEndpointInfo( + summary: Option[String], + description: Option[String], + tags: Vector[JsonRpcEndpointTag], + deprecated: Boolean, + externalDocs: Option[JsonRpcEndpointExternalDocs] +) { + def summary(s: String): JsonRpcEndpointInfo = copy(summary = Some(s)) + def description(d: String): JsonRpcEndpointInfo = copy(description = Some(d)) + def deprecated(d: Boolean): JsonRpcEndpointInfo = copy(deprecated = d) + def tags(ts: List[JsonRpcEndpointTag]): JsonRpcEndpointInfo = copy(tags = tags ++ ts) + def tag(t: JsonRpcEndpointTag): JsonRpcEndpointInfo = copy(tags = tags :+ t) + def externalDocs(ed: JsonRpcEndpointExternalDocs): JsonRpcEndpointInfo = copy(externalDocs = Some(ed)) +} + +object JsonRpcEndpointInfo { + val Empty: JsonRpcEndpointInfo = JsonRpcEndpointInfo(None, None, Vector.empty, deprecated = false, None) +} + +case class JsonRpcEndpointTag( + name: String, + summary: Option[String] = None, + description: Option[String] = None, + externalDocs: Option[JsonRpcEndpointExternalDocs] = None +) { + def summary(s: String): JsonRpcEndpointTag = copy(summary = Some(s)) + def description(d: String): JsonRpcEndpointTag = copy(description = Some(d)) + def externalDocs(ed: JsonRpcEndpointExternalDocs): JsonRpcEndpointTag = copy(externalDocs = Some(ed)) +} + +case class JsonRpcEndpointExternalDocs(url: String, description: Option[String] = None) { + def description(d: String): JsonRpcEndpointExternalDocs = copy(description = Some(d)) +} + +sealed trait JsonRpcEndpointTransput[T] { + private[armadillo] type ThisType[A] + + def show: String +} + +object JsonRpcEndpointTransput { + sealed trait Basic[T] extends JsonRpcEndpointTransput[T] { + def summary(s: String): ThisType[T] = withInfo(info.summary(s)) + def description(d: String): ThisType[T] = withInfo(info.description(d)) + def example(a: T): ThisType[T] = examples(Set(a)) + def examples(l: Set[T]): ThisType[T] = withInfo(info.examples(l)) + + def withInfo(value: JsonRpcIoInfo[T]): ThisType[T] + def info: JsonRpcIoInfo[T] + } +} + +sealed trait JsonRpcIO[T] extends JsonRpcInput[T] with JsonRpcOutput[T] with JsonRpcEndpointTransput[T] + +sealed trait JsonRpcInput[T] extends JsonRpcEndpointTransput[T] { + private[armadillo] type ThisType[X] <: JsonRpcInput[X] + + def and[U, TU](param: JsonRpcInput[U])(implicit concat: ParamConcat.Aux[T, U, TU]): JsonRpcInput[TU] = { + JsonRpcInput.Pair(this, param) + } +} + +object JsonRpcInput { + val emptyInput: JsonRpcInput[Unit] = JsonRpcIO.Empty() + + sealed trait Basic[T] extends JsonRpcInput[T] with JsonRpcEndpointTransput.Basic[T] { + override private[armadillo] type ThisType[X] <: JsonRpcInput.Basic[X] + + def deprecated(): ThisType[T] = withInfo(info.deprecated(true)) + + def validate(validator: Validator[T]): ThisType[T] + } + + case class Pair[T, U, TU](left: JsonRpcInput[T], right: JsonRpcInput[U]) extends JsonRpcInput[TU] { + override def show: String = { + def flattenedPairs(et: JsonRpcInput[_]): Vector[JsonRpcInput[_]] = + et 
match { + case p: Pair[_, _, _] => flattenedPairs(p.left) ++ flattenedPairs(p.right) + case other => Vector(other) + } + flattenedPairs(this).map(_.show).mkString("[", ",", "]") + } + } +} + +sealed trait JsonRpcErrorOutput[T] extends JsonRpcEndpointTransput[T] + +object JsonRpcErrorOutput { + def emptyOutput: JsonRpcErrorOutput[Unit] = JsonRpcErrorOutput.Empty() + def fixed(code: Int, message: String): JsonRpcErrorOutput[Unit] = JsonRpcErrorOutput.Fixed(code, message) + + sealed trait Atom[T] extends JsonRpcErrorOutput[T] { + type DATA = T + } + + case class SingleNoData() extends Atom[JsonRpcError.NoData] { + override def show: String = s"singleNoData" + } + + case class SingleWithData[T](codec: JsonRpcCodec[JsonRpcError[T]]) extends Atom[JsonRpcError[T]] { + override def show: String = s"singleWithData" + } + + case class Fixed[T]( + code: Int, + message: String + ) extends Atom[T] { + override def show: String = s"FixedJsonRpcError(message: $message, code: $code)" + } + + case class FixedWithData[T]( + code: Int, + message: String, + codec: JsonRpcCodec[T] + ) extends Atom[T] { + override def show: String = s"FixedJsonRpcErrorWithData(message: $message, code: $code)" + } + + case class Empty() extends Atom[Unit] { + override def show: String = "-" + } + + case class OneOfVariant[O] private[armadillo] (output: JsonRpcErrorOutput[O], appliesTo: Any => Boolean) + + case class OneOf[O, T](variants: List[OneOfVariant[_ <: O]], mapping: Mapping[O, T]) extends JsonRpcErrorOutput[T] { + override def show: String = s"OneOfError(${variants.map(_.output.show).mkString(",")})" + } +} + +sealed trait JsonRpcOutput[T] extends JsonRpcEndpointTransput[T] + +object JsonRpcOutput { + def emptyOutput: JsonRpcOutput[Unit] = JsonRpcIO.Empty() + + sealed trait Basic[T] extends JsonRpcOutput[T] with JsonRpcEndpointTransput.Basic[T] { + override private[armadillo] type ThisType[X] <: JsonRpcOutput.Basic[X] + } +} + +object JsonRpcIO { + + case class Empty[T]() extends JsonRpcIO[T] { + override def show: String = "-" + } + + case class Single[T](codec: JsonRpcCodec[T], info: JsonRpcIoInfo[T], name: String) + extends JsonRpcIO[T] + with JsonRpcInput.Basic[T] + with JsonRpcOutput.Basic[T] { + override def show: String = s"single($name)" + override private[armadillo] type ThisType[X] = Single[X] + override def withInfo(info: JsonRpcIoInfo[T]): Single[T] = copy(info = info) + + override def validate(validator: Validator[T]): Single[T] = copy(codec = codec.withValidator(validator)) + + def optional(implicit codec: JsonRpcCodec[Option[T]]): Single[Option[T]] = new Single[Option[T]](codec, info.optional, name) + } +} + +case class JsonRpcIoInfo[T]( + description: Option[String], + summary: Option[String], + deprecated: Option[Boolean] = None, + examples: Set[T] = Set.empty[T] +) { + def description(d: String): JsonRpcIoInfo[T] = copy(description = Some(d)) + def summary(s: String): JsonRpcIoInfo[T] = copy(summary = Some(s)) + def deprecated(d: Boolean): JsonRpcIoInfo[T] = copy(deprecated = Some(d)) + def example(e: T): JsonRpcIoInfo[T] = copy(examples = examples + e) + def examples(es: Set[T]): JsonRpcIoInfo[T] = copy(examples = es) + + def optional: JsonRpcIoInfo[Option[T]] = copy(examples = examples.map(Some(_))) +} + +object JsonRpcIoInfo { + def empty[T]: JsonRpcIoInfo[T] = JsonRpcIoInfo[T](None, None, None, Set.empty[T]) +} diff --git a/core/src/io/iohk/armadillo/JsonRpcServerEndpoint.scala b/core/src/io/iohk/armadillo/JsonRpcServerEndpoint.scala new file mode 100644 index 0000000..442ad72 --- /dev/null +++ 
b/core/src/io/iohk/armadillo/JsonRpcServerEndpoint.scala @@ -0,0 +1,44 @@ +package io.iohk.armadillo + +import sttp.monad.MonadError + +abstract class JsonRpcServerEndpoint[F[_]] { + type INPUT + type ERROR_OUTPUT + type OUTPUT + + def endpoint: JsonRpcEndpoint[INPUT, ERROR_OUTPUT, OUTPUT] + def logic: MonadError[F] => INPUT => F[Either[ERROR_OUTPUT, OUTPUT]] +} + +object JsonRpcServerEndpoint { + + /** The full type of a server endpoint, capturing the types of all input/output parameters. Most of the time, the simpler + * `JsonRpcServerEndpoint[F]` can be used instead. + */ + type Full[_INPUT, _ERROR_OUTPUT, _OUTPUT, F[_]] = JsonRpcServerEndpoint[F] { + type INPUT = _INPUT + type ERROR_OUTPUT = _ERROR_OUTPUT + type OUTPUT = _OUTPUT + } + + def apply[INPUT, ERROR_OUTPUT, OUTPUT, F[_]]( + endpoint: JsonRpcEndpoint[INPUT, ERROR_OUTPUT, OUTPUT], + logic: MonadError[F] => INPUT => F[Either[ERROR_OUTPUT, OUTPUT]] + ): JsonRpcServerEndpoint.Full[INPUT, ERROR_OUTPUT, OUTPUT, F] = { + type _INPUT = INPUT + type _ERROR_OUTPUT = ERROR_OUTPUT + type _OUTPUT = OUTPUT + val e = endpoint + val f = logic + new JsonRpcServerEndpoint[F] { + override type INPUT = _INPUT + override type ERROR_OUTPUT = _ERROR_OUTPUT + override type OUTPUT = _OUTPUT + + override def endpoint: JsonRpcEndpoint[INPUT, ERROR_OUTPUT, OUTPUT] = e + + override def logic: MonadError[F] => INPUT => F[Either[ERROR_OUTPUT, OUTPUT]] = f + } + } +} diff --git a/core/src/io/iohk/armadillo/MethodName.scala b/core/src/io/iohk/armadillo/MethodName.scala new file mode 100644 index 0000000..afc2200 --- /dev/null +++ b/core/src/io/iohk/armadillo/MethodName.scala @@ -0,0 +1,28 @@ +package io.iohk.armadillo + +class MethodName(private val value: String) extends AnyVal { + def asString: String = value +} + +trait MethodNameInterpolator { + implicit class MethodNameContext(sc: StringContext) { + def m(args: Any*): MethodName = MethodNameInterpolator.interpolate(sc, args) + } +} +object MethodNameInterpolator { + def interpolate(sc: StringContext, args: Any*): MethodName = { + val strings = sc.parts.iterator + val expressions = args.iterator + val buf = new StringBuilder(strings.next()) + while (strings.hasNext) { + buf.append(expressions.next()) + buf.append(strings.next()) + } + val str = buf.toString() + if (str.startsWith("rpc.")) { + throw new IllegalArgumentException("'rpc.' 
prefix is reserved for rpc-internal methods and extensions") + } else { + new MethodName(str) + } + } +} diff --git a/core/src/io/iohk/armadillo/ParamStructure.scala b/core/src/io/iohk/armadillo/ParamStructure.scala new file mode 100644 index 0000000..09549ce --- /dev/null +++ b/core/src/io/iohk/armadillo/ParamStructure.scala @@ -0,0 +1,8 @@ +package io.iohk.armadillo + +sealed trait ParamStructure +object ParamStructure { + case object Either extends ParamStructure + case object ByName extends ParamStructure + case object ByPosition extends ParamStructure +} diff --git a/core/src/io/iohk/armadillo/package.scala b/core/src/io/iohk/armadillo/package.scala new file mode 100644 index 0000000..6a0e844 --- /dev/null +++ b/core/src/io/iohk/armadillo/package.scala @@ -0,0 +1,8 @@ +package io.iohk + +package object armadillo extends Armadillo with MethodNameInterpolator { + val JsonRpcVersion_2_0: String = "2.0" + + type AnyEndpoint = JsonRpcEndpoint[_, _, _] + type AnyRequest = JsonRpcRequest[_] +} diff --git a/docs/server_interpreter_logic.md b/docs/server_interpreter_logic.md new file mode 100644 index 0000000..f052103 --- /dev/null +++ b/docs/server_interpreter_logic.md @@ -0,0 +1,25 @@ +```mermaid +flowchart TD + A[path & contentType json] --> B{parse} + B -->|Other| C[Parse error] + B -->|Object| E{decodeRequest} + B -->|Array| G[/traverse singleRequest/] + G -->|wrap| O[JsonRpcResponse] + subgraph single [Single requst processnig] + E -->|decodeFailure| F[Invalid Request] + E -->|decodeSuccess| H{matchMethod} + H -->|matched| decodeParams + H -->|notFound| L[Method not found] + decodeParams --> K{execute logic} + K -->|F.success| M[JsonRpcResponse] + K -->|F.error| N[Internal error] + end + + subgraph decodeParams [Decode params] + a{decodeAsObject} + a -->|decodeSuccess| b[params] + a -->|decodeFailure| c{decodeVector} + c -->|decodeSuccess| b[params] + a & c -->|decodeFailure| J[Invalid Params] + end +``` \ No newline at end of file diff --git a/eth.yaml b/eth.yaml new file mode 100644 index 0000000..add44ec --- /dev/null +++ b/eth.yaml @@ -0,0 +1,1786 @@ +{ + "openrpc": "1.0.0", + "info": { + "version": "1.0.10", + "title": "Ethereum JSON-RPC", + "description": "This API lets you interact with an EVM-based client via JSON-RPC", + "license": { + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html" + } + }, + "methods": [ + { + "name": "web3_clientVersion", + "description": "Returns the version of the current client", + "summary": "current client version", + "params": [ ], + "result": { + "name": "clientVersion", + "description": "client version", + "schema": { + "title": "clientVersion", + "type": "string" + } + } + }, + { + "name": "web3_sha3", + "summary": "Hashes data", + "description": "Hashes data using the Keccak-256 algorithm", + "params": [ + { + "name": "data", + "description": "data to hash using the Keccak-256 algorithm", + "summary": "data to hash", + "schema": { + "title": "data", + "type": "string", + "pattern": "^0x[a-fA-F\\d]+$" + } + } + ], + "result": { + "name": "hashedData", + "description": "Keccak-256 hash of the given data", + "schema": { + "$ref": "#/components/schemas/Keccak" + } + }, + "examples": [ + { + "name": "sha3Example", + "params": [ + { + "name": "sha3ParamExample", + "value": "0x68656c6c6f20776f726c64" + } + ], + "result": { + "name": "sha3ResultExample", + "value": "0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad" + } + } + ] + }, + { + "name": "net_listening", + "summary": "returns listening status", + 
"description": "Determines if this client is listening for new network connections.", + "params": [ ], + "result": { + "name": "netListeningResult", + "description": "`true` if listening is active or `false` if listening is not active", + "schema": { + "title": "isNetListening", + "type": "boolean" + } + }, + "examples": [ + { + "name": "netListeningTrueExample", + "description": "example of true result for net_listening", + "params": [ ], + "result": { + "name": "netListeningExampleFalseResult", + "value": true + } + } + ] + }, + { + "name": "net_peerCount", + "summary": "number of peers", + "description": "Returns the number of peers currently connected to this client.", + "params": [ ], + "result": { + "name": "quantity", + "description": "number of connected peers.", + "schema": { + "title": "numConnectedPeers", + "description": "Hex representation of number of connected peers", + "type": "string" + } + } + }, + { + "name": "net_version", + "summary": "Network identifier associated with network", + "description": "Returns the network ID associated with the current network.", + "params": [ ], + "result": { + "name": "networkId", + "description": "Network ID associated with the current network", + "schema": { + "title": "networkId", + "type": "string", + "pattern": "^[\\d]+$" + } + } + }, + { + "name": "eth_blockNumber", + "summary": "Returns the number of most recent block.", + "params": [ ], + "result": { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + }, + { + "name": "eth_call", + "summary": "Executes a new message call (locally) immediately without creating a transaction on the block chain.", + "params": [ + { + "$ref": "#/components/contentDescriptors/Transaction" + }, + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "name": "returnValue", + "description": "The return value of the executed contract", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + }, + { + "name": "eth_chainId", + "summary": "Returns the currently configured chain id", + "description": "Returns the currently configured chain id, a value used in replay-protected transaction signing as introduced by [EIP-155](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md).", + "params": [ ], + "result": { + "name": "chainId", + "description": "hex format integer of the current chain id. Defaults are ETC=61, ETH=1, Morden=62.", + "schema": { + "title": "chainId", + "type": "string", + "pattern": "^0x[a-fA-F\\d]+$" + } + } + }, + { + "name": "eth_coinbase", + "summary": "Returns the client coinbase address.", + "params": [ ], + "result": { + "name": "address", + "description": "The address owned by the client that is used as default for things like the mining reward", + "schema": { + "$ref": "#/components/schemas/Address" + } + } + }, + { + "name": "eth_estimateGas", + "summary": "Generates and returns an estimate of how much gas is necessary to allow the transaction to complete. The transaction will not be added to the blockchain. 
Note that the estimate may be significantly more than the amount of gas actually used by the transaction, for a variety of reasons including EVM mechanics and node performance.", + "params": [ + { + "$ref": "#/components/contentDescriptors/Transaction" + } + ], + "result": { + "name": "gasUsed", + "description": "The amount of gas used", + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + }, + { + "name": "eth_gasPrice", + "summary": "Returns the current price per gas in wei", + "params": [ ], + "result": { + "$ref": "#/components/contentDescriptors/GasPrice" + } + }, + { + "name": "eth_getBalance", + "summary": "Returns Ether balance of a given or account or contract", + "params": [ + { + "name": "address", + "required": true, + "description": "The address of the account or contract", + "schema": { + "$ref": "#/components/schemas/Address" + } + }, + { + "name": "blockNumber", + "description": "A BlockNumber at which to request the balance", + "schema": { + "$ref": "#/components/schemas/BlockNumber" + } + } + ], + "result": { + "name": "getBalanceResult", + "schema": { + "$ref": "#/components/schemas/IntegerOrNull" + } + } + }, + { + "name": "eth_getBlockByHash", + "summary": "Gets a block for a given hash", + "params": [ + { + "name": "blockHash", + "required": true, + "schema": { + "$ref": "#/components/schemas/BlockHash" + } + }, + { + "name": "includeTransactions", + "description": "If `true` it returns the full transaction objects, if `false` only the hashes of the transactions.", + "required": true, + "schema": { + "title": "isTransactionsIncluded", + "type": "boolean" + } + } + ], + "result": { + "name": "getBlockByHashResult", + "schema": { + "$ref": "#/components/schemas/BlockOrNull" + } + } + }, + { + "name": "eth_getBlockByNumber", + "summary": "Gets a block for a given number", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockNumber" + }, + { + "name": "includeTransactions", + "description": "If `true` it returns the full transaction objects, if `false` only the hashes of the transactions.", + "required": true, + "schema": { + "title": "isTransactionsIncluded", + "type": "boolean" + } + } + ], + "result": { + "name": "getBlockByNumberResult", + "schema": { + "$ref": "#/components/schemas/BlockOrNull" + } + } + }, + { + "name": "eth_getBlockTransactionCountByHash", + "summary": "Returns the number of transactions in a block from a block matching the given block hash.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockHash" + } + ], + "result": { + "name": "blockTransactionCountByHash", + "description": "The Number of total transactions in the given block", + "schema": { + "$ref": "#/components/schemas/IntegerOrNull" + } + } + }, + { + "name": "eth_getBlockTransactionCountByNumber", + "summary": "Returns the number of transactions in a block from a block matching the given block number.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "name": "blockTransactionCountByHash", + "description": "The Number of total transactions in the given block", + "schema": { + "$ref": "#/components/schemas/IntegerOrNull" + } + } + }, + { + "name": "eth_getCode", + "summary": "Returns code at a given contract address", + "params": [ + { + "name": "address", + "required": true, + "description": "The address of the contract", + "schema": { + "$ref": "#/components/schemas/Address" + } + }, + { + "name": "blockNumber", + "description": "A BlockNumber of which the code existed", + "schema": { + "$ref": 
"#/components/schemas/BlockNumber" + } + } + ], + "result": { + "name": "bytes", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + }, + { + "name": "eth_getFilterChanges", + "summary": "Polling method for a filter, which returns an array of logs which occurred since last poll.", + "params": [ + { + "name": "filterId", + "required": true, + "schema": { + "$ref": "#/components/schemas/FilterId" + } + } + ], + "result": { + "name": "logResult", + "schema": { + "title": "logResult", + "type": "array", + "items": { + "$ref": "#/components/schemas/Log" + } + } + } + }, + { + "name": "eth_getFilterLogs", + "summary": "Returns an array of all logs matching filter with given id.", + "params": [ + { + "name": "filterId", + "required": true, + "schema": { + "$ref": "#/components/schemas/FilterId" + } + } + ], + "result": { + "$ref": "#/components/contentDescriptors/Logs" + } + }, + { + "name": "eth_getRawTransactionByHash", + "summary": "Returns raw transaction data of a transaction with the given hash.", + "params": [ + { + "$ref": "#/components/contentDescriptors/TransactionHash" + } + ], + "result": { + "name": "rawTransactionByHash", + "description": "The raw transaction data", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + }, + { + "name": "eth_getRawTransactionByBlockHashAndIndex", + "summary": "Returns raw transaction data of a transaction with the block hash and index of which it was mined.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockHash" + }, + { + "name": "index", + "description": "The ordering in which a transaction is mined within its block.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "name": "rawTransaction", + "description": "The raw transaction data", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + }, + { + "name": "eth_getRawTransactionByBlockNumberAndIndex", + "summary": "Returns raw transaction data of a transaction with the block number and index of which it was mined.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockNumber" + }, + { + "name": "index", + "description": "The ordering in which a transaction is mined within its block.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "name": "rawTransaction", + "description": "The raw transaction data", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + }, + { + "name": "eth_getLogs", + "summary": "Returns an array of all logs matching a given filter object.", + "params": [ + { + "$ref": "#/components/contentDescriptors/Filter" + } + ], + "result": { + "$ref": "#/components/contentDescriptors/Logs" + } + }, + { + "name": "eth_getStorageAt", + "summary": "Gets a storage value from a contract address, a position, and an optional blockNumber", + "params": [ + { + "$ref": "#/components/contentDescriptors/Address" + }, + { + "$ref": "#/components/contentDescriptors/Position" + }, + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "name": "dataWord", + "schema": { + "$ref": "#/components/schemas/DataWord" + } + } + }, + { + "name": "eth_getTransactionByBlockHashAndIndex", + "summary": "Returns the information about a transaction requested by the block hash and index of which it was mined.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockHash" + }, + { + "name": "index", + "description": "The ordering in which a transaction is mined within its block.", + "required": true, + 
"schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "$ref": "#/components/contentDescriptors/TransactionResult" + }, + "examples": [ + { + "name": "nullExample", + "params": [ + { + "name": "blockHashExample", + "value": "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + }, + { + "name": "indexExample", + "value": "0x0" + } + ], + "result": { + "name": "nullResultExample", + "value": null + } + } + ] + }, + { + "name": "eth_getTransactionByBlockNumberAndIndex", + "summary": "Returns the information about a transaction requested by the block number and index of which it was mined.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockNumber" + }, + { + "name": "index", + "description": "The ordering in which a transaction is mined within its block.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "$ref": "#/components/contentDescriptors/TransactionResult" + } + }, + { + "name": "eth_getTransactionByHash", + "summary": "Returns the information about a transaction requested by transaction hash.", + "params": [ + { + "$ref": "#/components/contentDescriptors/TransactionHash" + } + ], + "result": { + "$ref": "#/components/contentDescriptors/TransactionResult" + } + }, + { + "name": "eth_getTransactionCount", + "summary": "Returns the number of transactions sent from an address", + "params": [ + { + "$ref": "#/components/contentDescriptors/Address" + }, + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "name": "transactionCount", + "schema": { + "title": "nonceOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Nonce" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + } + } + }, + { + "name": "eth_getTransactionReceipt", + "summary": "Returns the receipt information of a transaction by its hash.", + "params": [ + { + "$ref": "#/components/contentDescriptors/TransactionHash" + } + ], + "result": { + "name": "transactionReceiptResult", + "description": "returns either a receipt or null", + "schema": { + "title": "transactionReceiptOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Receipt" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + } + } + }, + { + "name": "eth_getUncleByBlockHashAndIndex", + "summary": "Returns information about a uncle of a block by hash and uncle index position.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockHash" + }, + { + "name": "index", + "description": "The ordering in which a uncle is included within its block.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "name": "uncle", + "schema": { + "$ref": "#/components/schemas/BlockOrNull" + } + } + }, + { + "name": "eth_getUncleByBlockNumberAndIndex", + "summary": "Returns information about a uncle of a block by hash and uncle index position.", + "params": [ + { + "name": "uncleBlockNumber", + "description": "The block in which the uncle was included", + "required": true, + "schema": { + "$ref": "#/components/schemas/BlockNumber" + } + }, + { + "name": "index", + "description": "The ordering in which a uncle is included within its block.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + ], + "result": { + "name": "uncleResult", + "description": "returns an uncle block or null", + "schema": { + "$ref": "#/components/schemas/BlockOrNull" + } + }, + "examples": [ + { + "name": "nullResultExample", + "params": [ + { + "name": 
"uncleBlockNumberExample", + "value": "0x0" + }, + { + "name": "uncleBlockNumberIndexExample", + "value": "0x0" + } + ], + "result": { + "name": "nullResultExample", + "value": null + } + } + ] + }, + { + "name": "eth_getUncleCountByBlockHash", + "summary": "Returns the number of uncles in a block from a block matching the given block hash.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockHash" + } + ], + "result": { + "name": "uncleCountResult", + "description": "The Number of total uncles in the given block", + "schema": { + "$ref": "#/components/schemas/IntegerOrNull" + } + } + }, + { + "name": "eth_getUncleCountByBlockNumber", + "summary": "Returns the number of uncles in a block from a block matching the given block number.", + "params": [ + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "$ref": "#/components/contentDescriptors/UncleCountResult" + } + }, + { + "name": "eth_getProof", + "summary": "Returns the account- and storage-values of the specified account including the Merkle-proof.", + "params": [ + { + "name": "address", + "description": "The address of the account or contract", + "required": true, + "schema": { + "$ref": "#/components/schemas/Address" + } + }, + { + "name": "storageKeys", + "required": true, + "schema": { + "title": "storageKeys", + "description": "A storage key is indexed from the solidity compiler by the order it is declared. For mappings it uses the keccak of the mapping key with its position (and recursively for X-dimensional mappings)", + "items": { + "$ref": "#/components/schemas/StorageProofKey" + } + } + }, + { + "$ref": "#/components/contentDescriptors/BlockNumber" + } + ], + "result": { + "name": "account", + "schema": { + "title": "proofAccountOrNull", + "oneOf": [ + { + "title": "proofAccount", + "type": "object", + "description": "The merkle proofs of the specified account connecting them to the blockhash of the block specified", + "properties": { + "address": { + "title": "proofAccountAddress", + "description": "The address of the account or contract of the request", + "$ref": "#/components/schemas/Address" + }, + "accountProof": { + "$ref": "#/components/schemas/ProofNodes" + }, + "balance": { + "title": "proofAccountBalance", + "description": "The Ether balance of the account or contract of the request", + "$ref": "#/components/schemas/Integer" + }, + "codeHash": { + "title": "proofAccountCodeHash", + "description": "The code hash of the contract of the request (keccak(NULL) if external account)", + "$ref": "#/components/schemas/Keccak" + }, + "nonce": { + "title": "proofAccountNonce", + "description": "The transaction count of the account or contract of the request", + "$ref": "#/components/schemas/Nonce" + }, + "storageHash": { + "title": "proofAccountStorageHash", + "description": "The storage hash of the contract of the request (keccak(rlp(NULL)) if external account)", + "$ref": "#/components/schemas/Keccak" + }, + "storageProof": { + "$ref": "#/components/schemas/StorageProof" + } + } + }, + { + "$ref": "#/components/schemas/Null" + } + ] + } + } + }, + { + "name": "eth_getWork", + "summary": "Returns the hash of the current block, the seedHash, and the boundary condition to be met ('target').", + "params": [ ], + "result": { + "name": "work", + "schema": { + "title": "getWorkResults", + "type": "array", + "items": [ + { + "$ref": "#/components/schemas/PowHash" + }, + { + "$ref": "#/components/schemas/SeedHash" + }, + { + "$ref": "#/components/schemas/Difficulty" + } + ] + } + } + }, 
+ { + "name": "eth_hashrate", + "summary": "Returns the number of hashes per second that the node is mining with.", + "params": [ ], + "result": { + "name": "hashesPerSecond", + "description": "Integer of the number of hashes per second", + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + }, + { + "name": "eth_mining", + "summary": "Returns true if client is actively mining new blocks.", + "params": [ ], + "result": { + "name": "mining", + "description": "Whether or not the client is mining", + "schema": { + "type": "boolean" + } + } + }, + { + "name": "eth_newBlockFilter", + "summary": "Creates a filter in the node, to notify when a new block arrives. To check if the state has changed, call eth_getFilterChanges.", + "params": [ ], + "result": { + "$ref": "#/components/contentDescriptors/FilterId" + } + }, + { + "name": "eth_newFilter", + "summary": "Creates a filter object, based on filter options, to notify when the state changes (logs). To check if the state has changed, call eth_getFilterChanges.", + "params": [ + { + "$ref": "#/components/contentDescriptors/Filter" + } + ], + "result": { + "name": "filterId", + "description": "The filter ID for use in `eth_getFilterChanges`", + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + }, + { + "name": "eth_newPendingTransactionFilter", + "summary": "Creates a filter in the node, to notify when new pending transactions arrive. To check if the state has changed, call eth_getFilterChanges.", + "params": [ ], + "result": { + "$ref": "#/components/contentDescriptors/FilterId" + } + }, + { + "name": "eth_pendingTransactions", + "summary": "Returns the transactions that are pending in the transaction pool and have a from address that is one of the accounts this node manages.", + "params": [ ], + "result": { + "name": "pendingTransactions", + "schema": { + "$ref": "#/components/schemas/Transactions" + } + } + }, + { + "name": "eth_protocolVersion", + "summary": "Returns the current ethereum protocol version.", + "params": [ ], + "result": { + "name": "protocolVersion", + "description": "The current ethereum protocol version", + "schema": { + "$ref": "#/components/schemas/Integer" + } + } + }, + { + "name": "eth_sendRawTransaction", + "summary": "Creates new message call transaction or a contract creation for signed transactions.", + "params": [ + { + "name": "signedTransactionData", + "required": true, + "description": "The signed transaction data", + "schema": { + "$ref": "#/components/schemas/Bytes" + } + } + ], + "result": { + "name": "transactionHash", + "description": "The transaction hash, or the zero hash if the transaction is not yet available.", + "schema": { + "$ref": "#/components/schemas/Keccak" + } + } + }, + { + "name": "eth_submitHashrate", + "deprecated": true, + "summary": "Used for submitting mining hashrate.", + "params": [ + { + "name": "hashRate", + "required": true, + "schema": { + "$ref": "#/components/schemas/DataWord" + } + }, + { + "name": "id", + "required": true, + "description": "String identifying the client", + "schema": { + "$ref": "#/components/schemas/DataWord" + } + } + ], + "result": { + "name": "submitHashRateSuccess", + "description": "whether of not submitting went through successfully", + "schema": { + "type": "boolean" + } + } + }, + { + "name": "eth_submitWork", + "summary": "Used for submitting a proof-of-work solution.", + "params": [ + { + "$ref": "#/components/contentDescriptors/Nonce" + }, + { + "name": "powHash", + "required": true, + "schema": { + "$ref": 
"#/components/schemas/PowHash" + } + }, + { + "name": "mixHash", + "required": true, + "schema": { + "$ref": "#/components/schemas/MixHash" + } + } + ], + "result": { + "name": "solutionValid", + "description": "returns true if the provided solution is valid, otherwise false.", + "schema": { + "type": "boolean" + } + }, + "examples": [ + { + "name": "submitWorkExample", + "params": [ + { + "name": "nonceExample", + "description": "example of a number only used once", + "value": "0x0000000000000001" + }, + { + "name": "powHashExample", + "description": "proof of work to submit", + "value": "0x6bf2cAE0dE3ec3ecA5E194a6C6e02cf42aADfe1C2c4Fff12E5D36C3Cf7297F22" + }, + { + "name": "mixHashExample", + "description": "the mix digest example", + "value": "0xD1FE5700000000000000000000000000D1FE5700000000000000000000000000" + } + ], + "result": { + "name": "solutionInvalidExample", + "description": "this example should return `false` as it is not a valid pow to submit", + "value": false + } + } + ] + }, + { + "name": "eth_syncing", + "summary": "Returns an object with data about the sync status or false.", + "params": [ ], + "result": { + "name": "syncing", + "schema": { + "title": "isSyncingResult", + "oneOf": [ + { + "title": "syncingData", + "description": "An object with sync status data", + "type": "object", + "properties": { + "startingBlock": { + "title": "syncingDataStartingBlock", + "description": "Block at which the import started (will only be reset, after the sync reached his head)", + "$ref": "#/components/schemas/Integer" + }, + "currentBlock": { + "title": "syncingDataCurrentBlock", + "description": "The current block, same as eth_blockNumber", + "$ref": "#/components/schemas/Integer" + }, + "highestBlock": { + "title": "syncingDataHighestBlock", + "description": "The estimated highest block", + "$ref": "#/components/schemas/Integer" + }, + "knownStates": { + "title": "syncingDataKnownStates", + "description": "The known states", + "$ref": "#/components/schemas/Integer" + }, + "pulledStates": { + "title": "syncingDataPulledStates", + "description": "The pulled states", + "$ref": "#/components/schemas/Integer" + } + } + }, + { + "type": "boolean" + } + ] + } + } + }, + { + "name": "eth_uninstallFilter", + "summary": "Uninstalls a filter with given id. Should always be called when watch is no longer needed. 
Additionally Filters timeout when they aren't requested with eth_getFilterChanges for a period of time.", + "params": [ + { + "name": "filterId", + "required": true, + "schema": { + "$ref": "#/components/schemas/FilterId" + } + } + ], + "result": { + "name": "filterUninstalledSuccess", + "description": "returns true if the filter was successfully uninstalled, false otherwise.", + "schema": { + "type": "boolean" + } + } + } + ], + "components": { + "schemas": { + "ProofNode": { + "title": "proofNode", + "description": "An individual node used to prove a path down a merkle-patricia-tree", + "$ref": "#/components/schemas/Bytes" + }, + "StorageProofKey": { + "title": "storageProofKey", + "description": "The key used to get the storage slot in its account tree.", + "$ref": "#/components/schemas/Integer" + }, + "StorageProof": { + "title": "storageProofSet", + "type": "array", + "description": "Current block header PoW hash.", + "items": { + "title": "storageProof", + "type": "object", + "description": "Object proving a relationship of a storage value to an account's storageHash.", + "properties": { + "key": { + "$ref": "#/components/schemas/StorageProofKey" + }, + "value": { + "title": "storageProofValue", + "description": "The value of the storage slot in its account tree", + "$ref": "#/components/schemas/Integer" + }, + "proof": { + "$ref": "#/components/schemas/ProofNodes" + } + } + } + }, + "ProofNodes": { + "title": "proofNodes", + "type": "array", + "description": "The set of node values needed to traverse a patricia merkle tree (from root to leaf) to retrieve a value", + "items": { + "$ref": "#/components/schemas/ProofNode" + } + }, + "PowHash": { + "title": "powHash", + "description": "Current block header PoW hash.", + "$ref": "#/components/schemas/DataWord" + }, + "SeedHash": { + "title": "seedHash", + "description": "The seed hash used for the DAG.", + "$ref": "#/components/schemas/DataWord" + }, + "MixHash": { + "title": "mixHash", + "description": "The mix digest.", + "$ref": "#/components/schemas/DataWord" + }, + "Difficulty": { + "title": "difficulty", + "description": "The boundary condition ('target'), 2^256 / difficulty.", + "$ref": "#/components/schemas/DataWord" + }, + "FilterId": { + "title": "filterId", + "type": "string", + "description": "An identifier used to reference the filter." 
+ }, + "BlockHash": { + "title": "blockHash", + "type": "string", + "pattern": "^0x[a-fA-F\\d]{64}$", + "description": "The hex representation of the Keccak 256 of the RLP encoded block" + }, + "BlockNumber": { + "title": "blockNumber", + "type": "string", + "description": "The hex representation of the block's height", + "$ref": "#/components/schemas/Integer" + }, + "BlockNumberTag": { + "title": "blockNumberTag", + "type": "string", + "description": "The optional block height description", + "enum": [ + "earliest", + "latest", + "pending" + ] + }, + "BlockOrNull": { + "title": "blockOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Block" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "IntegerOrNull": { + "title": "integerOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Integer" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "AddressOrNull": { + "title": "addressOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Address" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "Receipt": { + "title": "receipt", + "type": "object", + "description": "The receipt of a transaction", + "required": [ + "blockHash", + "blockNumber", + "contractAddress", + "cumulativeGasUsed", + "from", + "gasUsed", + "logs", + "logsBloom", + "to", + "transactionHash", + "transactionIndex" + ], + "properties": { + "blockHash": { + "$ref": "#/components/schemas/BlockHash" + }, + "blockNumber": { + "$ref": "#/components/schemas/BlockNumber" + }, + "contractAddress": { + "title": "ReceiptContractAddress", + "description": "The contract address created, if the transaction was a contract creation, otherwise null", + "$ref": "#/components/schemas/AddressOrNull" + }, + "cumulativeGasUsed": { + "title": "ReceiptCumulativeGasUsed", + "description": "The gas units used by the transaction", + "$ref": "#/components/schemas/Integer" + }, + "from": { + "$ref": "#/components/schemas/From" + }, + "gasUsed": { + "title": "ReceiptGasUsed", + "description": "The total gas used by the transaction", + "$ref": "#/components/schemas/Integer" + }, + "logs": { + "title": "logs", + "type": "array", + "description": "An array of all the logs triggered during the transaction", + "items": { + "$ref": "#/components/schemas/Log" + } + }, + "logsBloom": { + "$ref": "#/components/schemas/BloomFilter" + }, + "to": { + "$ref": "#/components/schemas/To" + }, + "transactionHash": { + "$ref": "#/components/schemas/TransactionHash" + }, + "transactionIndex": { + "$ref": "#/components/schemas/TransactionIndex" + }, + "postTransactionState": { + "title": "ReceiptPostTransactionState", + "description": "The intermediate stateRoot directly after transaction execution.", + "$ref": "#/components/schemas/Keccak" + }, + "status": { + "title": "ReceiptStatus", + "description": "Whether or not the transaction threw an error.", + "type": "boolean" + } + } + }, + "BloomFilter": { + "title": "bloomFilter", + "type": "string", + "description": "A 2048 bit bloom filter from the logs of the transaction. 
Each log sets 3 bits though taking the low-order 11 bits of each of the first three pairs of bytes in a Keccak 256 hash of the log's byte series" + }, + "Log": { + "title": "log", + "type": "object", + "description": "An indexed event generated during a transaction", + "properties": { + "address": { + "title": "LogAddress", + "description": "Sender of the transaction", + "$ref": "#/components/schemas/Address" + }, + "blockHash": { + "$ref": "#/components/schemas/BlockHash" + }, + "blockNumber": { + "$ref": "#/components/schemas/BlockNumber" + }, + "data": { + "title": "LogData", + "description": "The data/input string sent along with the transaction", + "$ref": "#/components/schemas/Bytes" + }, + "logIndex": { + "title": "LogIndex", + "description": "The index of the event within its transaction, null when its pending", + "$ref": "#/components/schemas/Integer" + }, + "removed": { + "title": "logIsRemoved", + "description": "Whether or not the log was orphaned off the main chain", + "type": "boolean" + }, + "topics": { + "$ref": "#/components/schemas/Topics" + }, + "transactionHash": { + "$ref": "#/components/schemas/TransactionHash" + }, + "transactionIndex": { + "$ref": "#/components/schemas/TransactionIndex" + } + } + }, + "Topics": { + "title": "LogTopics", + "description": "Topics are order-dependent. Each topic can also be an array of DATA with 'or' options.", + "type": "array", + "items": { + "$ref": "#/components/schemas/Topic" + } + }, + "Topic": { + "title": "topic", + "description": "32 Bytes DATA of indexed log arguments. (In solidity: The first topic is the hash of the signature of the event (e.g. Deposit(address,bytes32,uint256))", + "$ref": "#/components/schemas/DataWord" + }, + "TransactionIndex": { + "title": "transactionIndex", + "description": "The index of the transaction. null when its pending", + "$ref": "#/components/schemas/IntegerOrNull" + }, + "BlockNumberOrNull": { + "title": "blockNumberOrNull", + "description": "The block number or null when its the pending block", + "oneOf": [ + { + "$ref": "#/components/schemas/BlockNumber" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "BlockHashOrNull": { + "title": "blockHashOrNull", + "description": "The block hash or null when its the pending block", + "$ref": "#/components/schemas/KeccakOrPending" + }, + "NonceOrNull": { + "title": "nonceOrNull", + "description": "Randomly selected number to satisfy the proof-of-work or null when its the pending block", + "oneOf": [ + { + "$ref": "#/components/schemas/Nonce" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "From": { + "title": "From", + "description": "The sender of the transaction", + "$ref": "#/components/schemas/Address" + }, + "To": { + "title": "To", + "description": "Destination address of the transaction. 
Null if it was a contract create.", + "oneOf": [ + { + "$ref": "#/components/schemas/Address" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "Block": { + "title": "Block", + "description": "The Block is the collection of relevant pieces of information (known as the block header), together with information corresponding to the comprised transactions, and a set of other block headers that are known to have a parent equal to the present block’s parent’s parent.", + "type": "object", + "properties": { + "number": { + "$ref": "#/components/schemas/BlockNumberOrNull" + }, + "hash": { + "$ref": "#/components/schemas/BlockHashOrNull" + }, + "parentHash": { + "$ref": "#/components/schemas/BlockHash" + }, + "nonce": { + "$ref": "#/components/schemas/NonceOrNull" + }, + "sha3Uncles": { + "title": "blockShaUncles", + "description": "Keccak hash of the uncles data in the block", + "$ref": "#/components/schemas/Keccak" + }, + "logsBloom": { + "title": "blockLogsBloom", + "type": "string", + "description": "The bloom filter for the logs of the block or null when its the pending block", + "pattern": "^0x[a-fA-F\\d]+$" + }, + "transactionsRoot": { + "title": "blockTransactionsRoot", + "description": "The root of the transactions trie of the block.", + "$ref": "#/components/schemas/Keccak" + }, + "stateRoot": { + "title": "blockStateRoot", + "description": "The root of the final state trie of the block", + "$ref": "#/components/schemas/Keccak" + }, + "receiptsRoot": { + "title": "blockReceiptsRoot", + "description": "The root of the receipts trie of the block", + "$ref": "#/components/schemas/Keccak" + }, + "miner": { + "$ref": "#/components/schemas/AddressOrNull" + }, + "difficulty": { + "title": "blockDifficulty", + "type": "string", + "description": "Integer of the difficulty for this block" + }, + "totalDifficulty": { + "title": "blockTotalDifficulty", + "description": "Integer of the total difficulty of the chain until this block", + "$ref": "#/components/schemas/IntegerOrNull" + }, + "extraData": { + "title": "blockExtraData", + "type": "string", + "description": "The 'extra data' field of this block" + }, + "size": { + "title": "blockSize", + "type": "string", + "description": "Integer the size of this block in bytes" + }, + "gasLimit": { + "title": "blockGasLimit", + "type": "string", + "description": "The maximum gas allowed in this block" + }, + "gasUsed": { + "title": "blockGasUsed", + "type": "string", + "description": "The total used gas by all transactions in this block" + }, + "timestamp": { + "title": "blockTimeStamp", + "type": "string", + "description": "The unix timestamp for when the block was collated" + }, + "transactions": { + "title": "transactionsOrHashes", + "description": "Array of transaction objects, or 32 Bytes transaction hashes depending on the last given parameter", + "type": "array", + "items": { + "title": "transactionOrTransactionHash", + "oneOf": [ + { + "$ref": "#/components/schemas/Transaction" + }, + { + "$ref": "#/components/schemas/TransactionHash" + } + ] + } + }, + "uncles": { + "title": "uncleHashes", + "description": "Array of uncle hashes", + "type": "array", + "items": { + "title": "uncleHash", + "description": "Block hash of the RLP encoding of an uncle block", + "$ref": "#/components/schemas/Keccak" + } + } + } + }, + "Transaction": { + "title": "transaction", + "type": "object", + "required": [ + "gas", + "gasPrice", + "nonce" + ], + "properties": { + "blockHash": { + "$ref": "#/components/schemas/BlockHashOrNull" + }, + "blockNumber": { + 
"$ref": "#/components/schemas/BlockNumberOrNull" + }, + "from": { + "$ref": "#/components/schemas/From" + }, + "gas": { + "title": "transactionGas", + "type": "string", + "description": "The gas limit provided by the sender in Wei" + }, + "gasPrice": { + "title": "transactionGasPrice", + "type": "string", + "description": "The gas price willing to be paid by the sender in Wei" + }, + "hash": { + "$ref": "#/components/schemas/TransactionHash" + }, + "input": { + "title": "transactionInput", + "type": "string", + "description": "The data field sent with the transaction" + }, + "nonce": { + "title": "transactionNonce", + "description": "The total number of prior transactions made by the sender", + "$ref": "#/components/schemas/Nonce" + }, + "to": { + "$ref": "#/components/schemas/To" + }, + "transactionIndex": { + "$ref": "#/components/schemas/TransactionIndex" + }, + "value": { + "title": "transactionValue", + "description": "Value of Ether being transferred in Wei", + "$ref": "#/components/schemas/Keccak" + }, + "v": { + "title": "transactionSigV", + "type": "string", + "description": "ECDSA recovery id" + }, + "r": { + "title": "transactionSigR", + "type": "string", + "description": "ECDSA signature r" + }, + "s": { + "title": "transactionSigS", + "type": "string", + "description": "ECDSA signature s" + } + } + }, + "Transactions": { + "title": "transactions", + "description": "An array of transactions", + "type": "array", + "items": { + "$ref": "#/components/schemas/Transaction" + } + }, + "TransactionHash": { + "title": "transactionHash", + "type": "string", + "description": "Keccak 256 Hash of the RLP encoding of a transaction", + "$ref": "#/components/schemas/Keccak" + }, + "KeccakOrPending": { + "title": "keccakOrPending", + "oneOf": [ + { + "$ref": "#/components/schemas/Keccak" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + }, + "Keccak": { + "title": "keccak", + "type": "string", + "description": "Hex representation of a Keccak 256 hash", + "pattern": "^0x[a-fA-F\\d]{64}$" + }, + "Nonce": { + "title": "nonce", + "description": "A number only to be used once", + "$ref": "#/components/schemas/Integer" + }, + "Null": { + "title": "null", + "type": "null", + "description": "Null" + }, + "Integer": { + "title": "integer", + "type": "string", + "pattern": "^0x[a-fA-F0-9]+$", + "description": "Hex representation of the integer" + }, + "Address": { + "title": "address", + "type": "string", + "pattern": "^0x[a-fA-F\\d]{40}$" + }, + "Addresses": { + "title": "addresses", + "type": "array", + "description": "List of contract addresses from which to monitor events", + "items": { + "$ref": "#/components/schemas/Address" + } + }, + "Position": { + "title": "position", + "type": "string", + "description": "Hex representation of the storage slot where the variable exists", + "pattern": "^0x([a-fA-F0-9]?)+$" + }, + "DataWord": { + "title": "dataWord", + "type": "string", + "description": "Hex representation of a 256 bit unit of data", + "pattern": "^0x([a-fA-F\\d]{64})?$" + }, + "Bytes": { + "title": "bytes", + "type": "string", + "description": "Hex representation of a variable length byte array", + "pattern": "^0x([a-fA-F0-9]?)+$" + } + }, + "contentDescriptors": { + "Block": { + "name": "block", + "summary": "A block", + "description": "A block object", + "schema": { + "$ref": "#/components/schemas/Block" + } + }, + "Null": { + "name": "Null", + "description": "JSON Null value", + "summary": "Null value", + "schema": { + "$ref": "#/components/schemas/Null" + } + }, + "Signature": { + 
"name": "signature", + "summary": "The signature.", + "required": true, + "schema": { + "title": "signatureBytes", + "type": "string", + "description": "Hex representation of byte array between 2 and 65 chars long", + "pattern": "0x^([A-Fa-f0-9]{2}){65}$" + } + }, + "GasPrice": { + "name": "gasPrice", + "required": true, + "schema": { + "title": "gasPriceResult", + "description": "Integer of the current gas price", + "$ref": "#/components/schemas/Integer" + } + }, + "Transaction": { + "required": true, + "name": "transaction", + "schema": { + "$ref": "#/components/schemas/Transaction" + } + }, + "TransactionResult": { + "name": "transactionResult", + "description": "Returns a transaction or null", + "schema": { + "title": "TransactionOrNull", + "oneOf": [ + { + "$ref": "#/components/schemas/Transaction" + }, + { + "$ref": "#/components/schemas/Null" + } + ] + } + }, + "UncleCountResult": { + "name": "uncleCountResult", + "description": "The Number of total uncles in the given block", + "schema": { + "$ref": "#/components/schemas/IntegerOrNull" + } + }, + "Message": { + "name": "message", + "required": true, + "schema": { + "$ref": "#/components/schemas/Bytes" + } + }, + "Filter": { + "name": "filter", + "required": true, + "schema": { + "title": "filter", + "type": "object", + "description": "A filter used to monitor the blockchain for log/events", + "properties": { + "fromBlock": { + "$ref": "#/components/schemas/BlockNumber" + }, + "toBlock": { + "$ref": "#/components/schemas/BlockNumber" + }, + "address": { + "title": "oneOrArrayOfAddresses", + "oneOf": [ + { + "$ref": "#/components/schemas/Address" + }, + { + "$ref": "#/components/schemas/Addresses" + } + ] + }, + "topics": { + "$ref": "#/components/schemas/Topics" + } + } + } + }, + "Address": { + "name": "address", + "required": true, + "schema": { + "$ref": "#/components/schemas/Address" + } + }, + "BlockHash": { + "name": "blockHash", + "required": true, + "schema": { + "$ref": "#/components/schemas/BlockHash" + } + }, + "Nonce": { + "name": "nonce", + "required": true, + "schema": { + "$ref": "#/components/schemas/Nonce" + } + }, + "Position": { + "name": "key", + "required": true, + "schema": { + "$ref": "#/components/schemas/Position" + } + }, + "Logs": { + "name": "logs", + "description": "An array of all logs matching filter with given id.", + "schema": { + "title": "setOfLogs", + "type": "array", + "items": { + "$ref": "#/components/schemas/Log" + } + } + }, + "FilterId": { + "name": "filterId", + "schema": { + "$ref": "#/components/schemas/FilterId" + } + }, + "BlockNumber": { + "name": "blockNumber", + "required": true, + "schema": { + "title": "blockNumberOrTag", + "oneOf": [ + { + "$ref": "#/components/schemas/BlockNumber" + }, + { + "$ref": "#/components/schemas/BlockNumberTag" + } + ] + } + }, + "TransactionHash": { + "name": "transactionHash", + "required": true, + "schema": { + "$ref": "#/components/schemas/TransactionHash" + } + } + } + } +} diff --git a/example/circeApp/resources/logback.xml b/example/circeApp/resources/logback.xml new file mode 100644 index 0000000..ded8a2d --- /dev/null +++ b/example/circeApp/resources/logback.xml @@ -0,0 +1,18 @@ + + + + + %-5level %d{yyyy-MM-dd HH:mm:ss} [%-10.20thread] %logger{36} - %msg%n + + + + + + + + + + + + + \ No newline at end of file diff --git a/example/circeApp/src/io/iohk/armadillo/example/ExampleCirce.scala b/example/circeApp/src/io/iohk/armadillo/example/ExampleCirce.scala new file mode 100644 index 0000000..b0a4b09 --- /dev/null +++ 
b/example/circeApp/src/io/iohk/armadillo/example/ExampleCirce.scala @@ -0,0 +1,153 @@ +package io.iohk.armadillo.example + +import cats.effect.{ExitCode, IO, IOApp} +import io.circe.generic.semiauto._ +import io.circe.literal._ +import io.circe.{Decoder, Encoder, Json} +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe._ +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.server._ +import io.iohk.armadillo.server.tapir.TapirInterpreter +import org.http4s.blaze.server.BlazeServerBuilder +import org.http4s.server.Router +import sttp.client3.asynchttpclient.cats.AsyncHttpClientCatsBackend +import sttp.model.{StatusCode, Uri} +import sttp.monad.MonadError +import sttp.tapir.integ.cats._ +import sttp.tapir.server.http4s.{Http4sServerInterpreter, Http4sServerOptions} +import sttp.tapir.{DecodeResult, Schema} + +import scala.concurrent.ExecutionContext + +object ExampleCirce extends IOApp { + + implicit val rpcBlockResponseEncoder: Encoder[RpcBlockResponse] = deriveEncoder + implicit val rpcBlockResponseDecoder: Decoder[RpcBlockResponse] = deriveDecoder + implicit val rpcBlockResponseSchema: Schema[RpcBlockResponse] = Schema.derived + + case class RpcBlockResponse(number: Int) + + val endpoint: JsonRpcServerEndpoint[IO] = jsonRpcEndpoint(m"eth_getBlockByNumber") + .in( + param[Int]("blockNumber").and(param[String]("includeTransactions")) + ) + .out[Option[RpcBlockResponse]]("blockResponse") + .serverLogic[IO] { case (int, string) => + println("user logic") + println(s"with input ${int + 123} ${string.toUpperCase}") + IO.delay(Left(JsonRpcError[Unit](1, "q", int))) + } + + override def run(args: List[String]): IO[ExitCode] = { + + implicit val catsMonadError: CatsMonadError[IO] = new CatsMonadError + + val tapirInterpreter = new TapirInterpreter[IO, Json]( + new CirceJsonSupport, + List(new LoggingEndpointInterceptor, new LoggingRequestInterceptor, new GenericIOInterceptor[Json]) + ) + val tapirEndpoints = tapirInterpreter.toTapirEndpointUnsafe(List(endpoint)) + val routes = Http4sServerInterpreter[IO](Http4sServerOptions.default[IO]).toRoutes(tapirEndpoints) + implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global + import sttp.tapir.client.sttp.SttpClientInterpreter + + BlazeServerBuilder[IO] + .withExecutionContext(ec) + .bindHttp(8545, "localhost") + .withHttpApp(Router("/" -> routes).orNotFound) + .resource + .flatMap { _ => + AsyncHttpClientCatsBackend.resource[IO]() + } + .use { client => + val sttpClient = SttpClientInterpreter().toClient(tapirEndpoints.endpoint, Some(Uri.apply("localhost", 8545)), client) + sttpClient.apply(json"""{"jsonrpc": "2.0", "method": "eth_getBlockByNumber", "params": [123, "true"], "id": 1}""".noSpaces).map { + case failure: DecodeResult.Failure => + println(s"response decoding failure $failure") + case DecodeResult.Value(v) => + v match { + case Left(value) => + println(s"error response: $value") + case Right((json: Option[Json], code: StatusCode)) => + println(s"response ${json.map(_.noSpaces)} code: $code") + case Right(other: Any) => + println(s"response $other") + } + } >> IO.never + } + .as(ExitCode.Success) + } +} + +class GenericIOInterceptor[Raw] extends EndpointInterceptor[IO, Raw] { + override def apply( + responder: Responder[IO, Raw], + jsonSupport: JsonSupport[Raw], + endpointHandler: EndpointHandler[IO, Raw] + ): EndpointHandler[IO, Raw] = { + new EndpointHandler[IO, Raw] { + override def onDecodeSuccess[I, E, O]( + ctx: 
EndpointHandler.DecodeSuccessContext[IO, I, E, O, Raw] + )(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Raw]] = { + println(s"onDecodeSuccess ${ctx.endpoint.endpoint.methodName}") + endpointHandler.onDecodeSuccess(ctx).flatTap(_ => IO.println(s"after onDecodeSuccess ${ctx.endpoint.endpoint.methodName}")) + } + + override def onDecodeFailure( + ctx: EndpointHandler.DecodeFailureContext[IO, Raw] + )(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Raw]] = { + println(s"onDecodeFailure ${ctx.endpoint.endpoint.methodName}") + endpointHandler.onDecodeFailure(ctx).flatTap(_ => IO.println(s"after onDecodeFailure ${ctx.endpoint.endpoint.methodName}")) + } + } + } +} + +class LoggingEndpointInterceptor extends EndpointInterceptor[IO, Json] { + + override def apply( + responder: Responder[IO, Json], + jsonSupport: JsonSupport[Json], + endpointHandler: EndpointHandler[IO, Json] + ): EndpointHandler[IO, Json] = { + new EndpointHandler[IO, Json] { + override def onDecodeSuccess[I, E, O]( + ctx: EndpointHandler.DecodeSuccessContext[IO, I, E, O, Json] + )(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Json]] = { + println(s"onDecodeSuccess ${ctx.endpoint.endpoint.methodName}") + endpointHandler.onDecodeSuccess(ctx).flatTap(_ => IO.println(s"after onDecodeSuccess ${ctx.endpoint.endpoint.methodName}")) + } + + override def onDecodeFailure( + ctx: EndpointHandler.DecodeFailureContext[IO, Json] + )(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Json]] = { + println(s"onDecodeFailure ${ctx.endpoint.endpoint.methodName}") + endpointHandler.onDecodeFailure(ctx).flatTap(_ => IO.println(s"after onDecodeFailure ${ctx.endpoint.endpoint.methodName}")) + } + } + } +} + +class LoggingRequestInterceptor extends RequestInterceptor[IO, Json] { + override def apply( + responder: Responder[IO, Json], + jsonSupport: JsonSupport[Json], + requestHandler: MethodInterceptor[IO, Json] => RequestHandler[IO, Json] + ): RequestHandler[IO, Json] = { + new RequestHandler[IO, Json] { + override def onDecodeSuccess(request: JsonSupport.Json[Json])(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Json]] = { + requestHandler + .apply(MethodInterceptor.noop[IO, Json]()) + .onDecodeSuccess(request) + .flatTap(_ => IO.println("after onDecodeSuccess")) + } + + override def onDecodeFailure( + ctx: RequestHandler.DecodeFailureContext + )(implicit monad: MonadError[IO]): IO[ResponseHandlingStatus[Json]] = { + requestHandler.apply(MethodInterceptor.noop[IO, Json]()).onDecodeFailure(ctx).flatTap(_ => IO.println("after onDecodeFailure")) + } + } + } +} diff --git a/example/circeFs2/README.md b/example/circeFs2/README.md new file mode 100644 index 0000000..0e4e8d5 --- /dev/null +++ b/example/circeFs2/README.md @@ -0,0 +1,3 @@ +```shell +cat request.json | nc -U ./fs2-unix-sockets-test.sock +``` \ No newline at end of file diff --git a/example/circeFs2/request.json b/example/circeFs2/request.json new file mode 100644 index 0000000..5f1f3ae --- /dev/null +++ b/example/circeFs2/request.json @@ -0,0 +1,8 @@ +{ + "jsonrpc": "2.0", + "method": "hello", + "params": [ + 42 + ], + "id": 1 +} diff --git a/example/circeFs2/src/io/iohk/armadillo/circefs2/Main.scala b/example/circeFs2/src/io/iohk/armadillo/circefs2/Main.scala new file mode 100644 index 0000000..6b1d3c5 --- /dev/null +++ b/example/circeFs2/src/io/iohk/armadillo/circefs2/Main.scala @@ -0,0 +1,37 @@ +package io.iohk.armadillo.circefs2 + +import cats.effect.{ExitCode, IO, IOApp} +import fs2.Pipe +import 
fs2.io.net.unixsocket.{UnixSocketAddress, UnixSockets} +import io.circe.Json +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe._ +import io.iohk.armadillo.server.fs2.Fs2Interpreter + +object Main extends IOApp { + val hello_in_int_out_string: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1")) + .out[String]("response") + + override def run(args: List[String]): IO[ExitCode] = { + val address = UnixSocketAddress("./example/circeFs2/fs2-unix-sockets-test.sock") + + val se = hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))) + val jsonSupport = new CirceJsonSupport + val jsonRpcServer: Pipe[IO, String, Json] = new Fs2Interpreter[IO, Json](jsonSupport).toFs2Pipe(List(se)).getOrElse(???) + + UnixSockets[IO] + .server(address) + .flatMap { client => + client.reads + .through(fs2.text.utf8.decode) + .through(jsonRpcServer) + .map(jsonSupport.stringify) + .through(fs2.text.utf8.encode) + .through(client.writes) + } + .compile + .drain + .as(ExitCode.Success) + } +} diff --git a/example/json4sAndTrace4cats/resources/logback.xml b/example/json4sAndTrace4cats/resources/logback.xml new file mode 100644 index 0000000..ded8a2d --- /dev/null +++ b/example/json4sAndTrace4cats/resources/logback.xml @@ -0,0 +1,18 @@ + + + + + %-5level %d{yyyy-MM-dd HH:mm:ss} [%-10.20thread] %logger{36} - %msg%n + + + + + + + + + + + + + \ No newline at end of file diff --git a/example/json4sAndTrace4cats/src/io/iohk/armadillo/example/ExampleTraced.scala b/example/json4sAndTrace4cats/src/io/iohk/armadillo/example/ExampleTraced.scala new file mode 100644 index 0000000..980188b --- /dev/null +++ b/example/json4sAndTrace4cats/src/io/iohk/armadillo/example/ExampleTraced.scala @@ -0,0 +1,75 @@ +package io.iohk.armadillo.example + +import cats.Applicative +import cats.data.Kleisli +import cats.effect.kernel.{MonadCancelThrow, Resource, Sync} +import cats.effect.{ExitCode, IO, IOApp} +import io.iohk.armadillo.json.json4s._ +import io.iohk.armadillo.server.tapir.TapirInterpreter +import io.iohk.armadillo.trace4cats.syntax._ +import io.iohk.armadillo.{JsonRpcServerEndpoint, _} +import io.janstenpickle.trace4cats.Span +import io.janstenpickle.trace4cats.base.context.Provide +import io.janstenpickle.trace4cats.inject.{EntryPoint, Trace} +import io.janstenpickle.trace4cats.kernel.SpanSampler +import io.janstenpickle.trace4cats.log.LogSpanCompleter +import io.janstenpickle.trace4cats.model.TraceProcess +import org.http4s.blaze.server.BlazeServerBuilder +import org.http4s.server.Router +import org.json4s.{Formats, JValue, NoTypeHints, Serialization} +import sttp.tapir.Schema +import sttp.tapir.integ.cats.CatsMonadError +import sttp.tapir.server.http4s.{Http4sServerInterpreter, Http4sServerOptions} + +import scala.concurrent.ExecutionContext + +object ExampleTraced extends IOApp { + implicit val rpcBlockResponseSchema: Schema[RpcBlockResponse] = Schema.derived + implicit val serialization: Serialization = org.json4s.jackson.Serialization + implicit val formats: Formats = org.json4s.jackson.Serialization.formats(NoTypeHints) + implicit val json4sSupport: Json4sSupport = Json4sSupport(org.json4s.jackson.parseJson(_), org.json4s.jackson.compactJson) + + case class RpcBlockResponse(number: Int) + + class Endpoints[F[_]: Sync, G[_]: MonadCancelThrow: Trace] { + + val getBlockByNumber = jsonRpcEndpoint(m"eth_getBlockByNumber") + .in( + param[Int]("blockNumber").and(param[String]("includeTransactions")) + ) + .out[Option[RpcBlockResponse]]("blockResponse") + 
.serverLogic[G] { case (int, string) => + println("user logic") + println(s"with input ${int + 123} ${string.toUpperCase}") + Applicative[G].pure(Left(JsonRpcError(11, "qwe", 11))) + } + + def tracedEndpoints(entryPoint: EntryPoint[F])(implicit P: Provide[F, G, Span[F]]): List[JsonRpcServerEndpoint[F]] = + List(getBlockByNumber.inject(entryPoint)) + } + + override def run(args: List[String]): IO[ExitCode] = { + implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global + implicit val catsMonadError: CatsMonadError[IO] = new CatsMonadError + val endpoints = new Endpoints[IO, Kleisli[IO, Span[IO], *]] + val routesR = for { + completer <- Resource.eval(LogSpanCompleter.create[IO](TraceProcess("example"))) + ep = EntryPoint(SpanSampler.always[IO], completer) + tapirInterpreter = new TapirInterpreter[IO, JValue](json4sSupport) + tapirEndpoints = tapirInterpreter.toTapirEndpoint(endpoints.tracedEndpoints(ep)).getOrElse(???) + routes = Http4sServerInterpreter[IO](Http4sServerOptions.default[IO]).toRoutes(tapirEndpoints) + } yield routes + + routesR.use { routes => + BlazeServerBuilder[IO] + .withExecutionContext(ec) + .bindHttp(8545, "localhost") + .withHttpApp(Router("/" -> routes).orNotFound) + .resource + .use { _ => + IO.never + } + .as(ExitCode.Success) + } + } +} diff --git a/example/json4sApp/resources/logback.xml b/example/json4sApp/resources/logback.xml new file mode 100644 index 0000000..ded8a2d --- /dev/null +++ b/example/json4sApp/resources/logback.xml @@ -0,0 +1,18 @@ + + + + + %-5level %d{yyyy-MM-dd HH:mm:ss} [%-10.20thread] %logger{36} - %msg%n + + + + + + + + + + + + + \ No newline at end of file diff --git a/example/json4sApp/src/io/iohk/armadillo/example/ExampleJson4s.scala b/example/json4sApp/src/io/iohk/armadillo/example/ExampleJson4s.scala new file mode 100644 index 0000000..cd8e16f --- /dev/null +++ b/example/json4sApp/src/io/iohk/armadillo/example/ExampleJson4s.scala @@ -0,0 +1,54 @@ +package io.iohk.armadillo.example + +import cats.effect.{ExitCode, IO, IOApp} +import cats.syntax.all._ +import io.iohk.armadillo._ +import io.iohk.armadillo.json.json4s._ +import io.iohk.armadillo.server.tapir.TapirInterpreter +import org.http4s.blaze.server.BlazeServerBuilder +import org.http4s.server.Router +import org.json4s.{Formats, JValue, NoTypeHints, Serialization} +import sttp.tapir.Schema +import sttp.tapir.integ.cats.CatsMonadError +import sttp.tapir.server.http4s.{Http4sServerInterpreter, Http4sServerOptions} + +import scala.concurrent.ExecutionContext + +object ExampleJson4s extends IOApp { + implicit val rpcBlockResponseSchema: Schema[RpcBlockResponse] = Schema.derived + implicit val serialization: Serialization = org.json4s.jackson.Serialization + implicit val formats: Formats = org.json4s.jackson.Serialization.formats(NoTypeHints) + implicit val json4sSupport: Json4sSupport = Json4sSupport(org.json4s.jackson.parseJson(_), org.json4s.jackson.compactJson) + + case class RpcBlockResponse(number: Int) + + val endpoint: JsonRpcServerEndpoint[IO] = jsonRpcEndpoint(m"eth_getBlockByNumber") + .in( + param[Int]("blockNumber").and(param[String]("includeTransactions")) + ) + .out[Option[RpcBlockResponse]]("blockResponse") + .serverLogic[IO] { case (int, string) => + println("user logic") + println(s"with input ${int + 123} ${string.toUpperCase}") + IO.delay(RpcBlockResponse(int).some.asRight) + } + + override def run(args: List[String]): IO[ExitCode] = { + implicit val catsMonadError: CatsMonadError[IO] = new CatsMonadError + val tapirInterpreter = new 
TapirInterpreter[IO, JValue](json4sSupport) + val tapirEndpoints = tapirInterpreter.toTapirEndpoint(List(endpoint)).getOrElse(???) + val routes = Http4sServerInterpreter[IO](Http4sServerOptions.default[IO]).toRoutes(tapirEndpoints) + implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global + + // IO.unit.as(ExitCode.Success) + BlazeServerBuilder[IO] + .withExecutionContext(ec) + .bindHttp(8545, "localhost") + .withHttpApp(Router("/" -> routes).orNotFound) + .resource + .use { _ => + IO.never + } + .as(ExitCode.Success) + } +} diff --git a/example/tapirWebsocket/resources/logback.xml b/example/tapirWebsocket/resources/logback.xml new file mode 100644 index 0000000..ded8a2d --- /dev/null +++ b/example/tapirWebsocket/resources/logback.xml @@ -0,0 +1,18 @@ + + + + + %-5level %d{yyyy-MM-dd HH:mm:ss} [%-10.20thread] %logger{36} - %msg%n + + + + + + + + + + + + + \ No newline at end of file diff --git a/example/tapirWebsocket/src/io/iohk/armadillo/example/ExampleTapirWebsocket.scala b/example/tapirWebsocket/src/io/iohk/armadillo/example/ExampleTapirWebsocket.scala new file mode 100644 index 0000000..f1fbc6d --- /dev/null +++ b/example/tapirWebsocket/src/io/iohk/armadillo/example/ExampleTapirWebsocket.scala @@ -0,0 +1,71 @@ +package io.iohk.armadillo.example + +import cats.effect.{ExitCode, IO, IOApp} +import fs2.Pipe +import io.circe.Json +import io.circe.literal._ +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe._ +import io.iohk.armadillo.server.fs2.Fs2Interpreter +import org.http4s.HttpRoutes +import org.http4s.blaze.server.BlazeServerBuilder +import org.http4s.server.Router +import org.http4s.server.websocket.WebSocketBuilder2 +import sttp.capabilities.WebSockets +import sttp.capabilities.fs2.Fs2Streams +import sttp.client3.asynchttpclient.fs2.AsyncHttpClientFs2Backend +import sttp.client3.{UriContext, asWebSocket, basicRequest} +import sttp.tapir.integ.cats._ +import sttp.tapir.json.circe._ +import sttp.tapir.server.http4s.Http4sServerInterpreter +import sttp.tapir.{CodecFormat, PublicEndpoint, webSocketBody} +import sttp.ws.WebSocket + +import scala.concurrent.ExecutionContext + +object ExampleTapirWebsocket extends IOApp { + + val endpoint: JsonRpcServerEndpoint[IO] = jsonRpcEndpoint(m"greet") + .in( + param[String]("name") + ) + .out[String]("greeting") + .serverLogic[IO](name => IO.delay(Right(s"Hello $name"))) + + override def run(args: List[String]): IO[ExitCode] = { + implicit val catsMonadError: CatsMonadError[IO] = new CatsMonadError + implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global + + val jsonRpcServer: Pipe[IO, String, Json] = new Fs2Interpreter[IO, Json](new CirceJsonSupport).toFs2Pipe(List(endpoint)).getOrElse(???) 
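+ // The fs2 pipe above is exposed over a WebSocket below: every incoming text frame is handled
+ // as a JSON-RPC request by jsonRpcServer and the resulting circe Json is sent back as a response frame.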
+ + val wsEndpoint: PublicEndpoint[Unit, Unit, Pipe[IO, String, Json], Fs2Streams[IO] with WebSockets] = + sttp.tapir.endpoint.get.out(webSocketBody[String, CodecFormat.TextPlain, Json, CodecFormat.Json](Fs2Streams[IO])) + + val wsRoutes: WebSocketBuilder2[IO] => HttpRoutes[IO] = + Http4sServerInterpreter[IO]().toWebSocketRoutes(wsEndpoint.serverLogicSuccess(_ => IO.pure(jsonRpcServer))) + + BlazeServerBuilder[IO] + .withExecutionContext(ec) + .bindHttp(8001, "localhost") + .withHttpWebSocketApp(wsb => Router("/" -> wsRoutes(wsb)).orNotFound) + .resource + .flatMap { _ => + AsyncHttpClientFs2Backend.resource[IO]() + } + .use { backend => + // Client which interacts with the web socket + basicRequest + .response(asWebSocket { ws: WebSocket[IO] => + for { + _ <- IO.println("sending request") + _ <- ws.sendText(json"""{"jsonrpc": "2.0", "method": "greet", "params": ["John"], "id": 1 }""".noSpaces) + r1 <- ws.receiveText() + _ = println(s"received response: $r1") + } yield () + }) + .get(uri"ws://localhost:8001/") + .send(backend) + } + .as(ExitCode.Success) + } +} diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..ba50bef --- /dev/null +++ b/flake.lock @@ -0,0 +1,59 @@ +{ + "nodes": { + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1650374568, + "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "b4a34015c698c7793d592d66adbab377907a2be8", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1659877975, + "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1663926790, + "narHash": "sha256-94I7+O66v5WbyYOP+qI9HmfeffoYvLqo93oJE4/20Is=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "866431777b6c766cea0db7aad347824d3816be68", + "type": "github" + }, + "original": { + "owner": "nixos", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..f1621fd --- /dev/null +++ b/flake.nix @@ -0,0 +1,30 @@ +{ + inputs = { + nixpkgs.url = "github:nixos/nixpkgs"; + flake-utils.url = "github:numtide/flake-utils"; + }; + + inputs.flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; + + outputs = { self, nixpkgs, flake-utils, ... 
}@inputs: + flake-utils.lib.eachDefaultSystem ( + system: + let + jvm = final: prev: { jdk = final.openjdk17; jre = final.jdk; }; + pkgs = import nixpkgs { + inherit system; + overlays = [ jvm ]; + }; + in + { + devShell = pkgs.mkShell { + buildInputs = [ + pkgs.gnupg + ]; + }; + } + ); +} \ No newline at end of file diff --git a/json/circe/src/io/iohk/armadillo/json/circe/ArmadilloCirceJson.scala b/json/circe/src/io/iohk/armadillo/json/circe/ArmadilloCirceJson.scala new file mode 100644 index 0000000..2585795 --- /dev/null +++ b/json/circe/src/io/iohk/armadillo/json/circe/ArmadilloCirceJson.scala @@ -0,0 +1,53 @@ +package io.iohk.armadillo.json.circe + +import cats.implicits.toFunctorOps +import io.circe.generic.semiauto._ +import io.circe.{Decoder, Encoder, Json} +import io.iohk.armadillo._ +import sttp.tapir.{DecodeResult, Schema} + +trait ArmadilloCirceJson { + + implicit def jsonRpcCodec[H: Encoder: Decoder: Schema]: JsonRpcCodec[H] = new JsonRpcCodec[H] { + override type L = Json + + override def encode(h: H): Json = Encoder[H].apply(h) + + override def schema: Schema[H] = implicitly[Schema[H]] + + override def decode(l: Json): DecodeResult[H] = { + implicitly[Decoder[H]].decodeJson(l) match { + case Left(value) => DecodeResult.Error(l.noSpaces, value) + case Right(value) => DecodeResult.Value(value) + } + } + + override def show(l: Json): String = l.noSpaces + } + + implicit val jsonRpcIdEncoder: Encoder[JsonRpcId] = Encoder.instance[JsonRpcId] { + case JsonRpcId.IntId(value) => Encoder.encodeInt(value) + case JsonRpcId.StringId(value) => Encoder.encodeString(value) + } + + implicit val jsonRpcIdDecoder: Decoder[JsonRpcId] = Decoder.decodeInt + .map(JsonRpcId.IntId) + .widen + .or(Decoder.decodeString.map(JsonRpcId.StringId).widen[JsonRpcId]) + + implicit val jsonRpcErrorNoDataEncoder: Encoder[JsonRpcError[Unit]] = Encoder { i => + Json.obj("code" -> Json.fromInt(i.code), "message" -> Json.fromString(i.message)) + } + implicit val jsonRpcErrorNoDataDecoder: Decoder[JsonRpcError[Unit]] = Decoder { i => + for { + code <- i.downField("code").as[Int] + msg <- i.downField("message").as[String] + } yield JsonRpcError.noData(code, msg) + } + implicit def jsonRpcErrorEncoder[T: Encoder]: Encoder[JsonRpcError[T]] = deriveEncoder[JsonRpcError[T]] + implicit def jsonRpcErrorDecoder[T: Decoder]: Decoder[JsonRpcError[T]] = deriveDecoder[JsonRpcError[T]] + + implicit val jsonRpcSuccessResponseEncoder: Encoder[JsonRpcSuccessResponse[Json]] = deriveEncoder[JsonRpcSuccessResponse[Json]] + implicit val jsonRpcRequestDecoder: Decoder[JsonRpcRequest[Json]] = deriveDecoder[JsonRpcRequest[Json]] + implicit val jsonRpcErrorResponseEncoder: Encoder[JsonRpcErrorResponse[Json]] = deriveEncoder[JsonRpcErrorResponse[Json]] +} diff --git a/json/circe/src/io/iohk/armadillo/json/circe/CirceJsonSupport.scala b/json/circe/src/io/iohk/armadillo/json/circe/CirceJsonSupport.scala new file mode 100644 index 0000000..fe63546 --- /dev/null +++ b/json/circe/src/io/iohk/armadillo/json/circe/CirceJsonSupport.scala @@ -0,0 +1,61 @@ +package io.iohk.armadillo.json.circe + +import io.circe._ +import io.iohk.armadillo._ +import io.iohk.armadillo.server.JsonSupport +import io.iohk.armadillo.server.JsonSupport.{Json => AJson} +import sttp.tapir.DecodeResult + +class CirceJsonSupport extends JsonSupport[Json] { + + override def asArray(seq: Seq[Json]): Json = Json.arr(seq: _*) + + override def jsNull: Json = Json.Null + + override def parse(string: String): DecodeResult[AJson[Json]] = { + io.circe.parser.decode[Json](string) match 
{ + case Left(value) => DecodeResult.Error(string, value) + case Right(value) => DecodeResult.Value(materialize(value)) + } + } + + def materialize(json: Json): AJson[Json] = { + json.fold( + jsonNull = AJson.Other(json), + jsonBoolean = _ => AJson.Other(json), + jsonNumber = _ => AJson.Other(json), + jsonString = _ => AJson.Other(json), + jsonArray = arr => AJson.JsonArray.apply(arr), + jsonObject = obj => AJson.JsonObject(obj.toList) + ) + } + + override def demateralize(json: AJson[Json]): Json = { + json match { + case AJson.JsonObject(raw) => Json.obj(raw: _*) + case AJson.JsonArray(raw) => asArray(raw) + case AJson.Other(raw) => raw + } + } + + override def stringify(raw: Json): String = raw.noSpaces + + override def encodeErrorNoData(error: JsonRpcError.NoData): Json = Encoder[JsonRpcError.NoData].apply(error) + + override def encodeErrorWithData(error: JsonRpcError[Json]): Json = Encoder[JsonRpcError[Json]].apply(error) + + override def encodeResponse(response: JsonRpcResponse[Json]): Json = { + response match { + case success: JsonRpcSuccessResponse[Json] => Encoder[JsonRpcSuccessResponse[Json]].apply(success) + case er: JsonRpcErrorResponse[Json] => Encoder[JsonRpcErrorResponse[Json]].apply(er) + } + } + + override def decodeJsonRpcRequest(obj: AJson.JsonObject[Json]): DecodeResult[JsonRpcRequest[AJson[Json]]] = { + val raw = demateralize(obj) + Decoder[JsonRpcRequest[Json]].decodeJson(raw) match { + case Left(value) => DecodeResult.Error(raw.noSpaces, value) + case Right(value) => DecodeResult.Value(value.copy(params = value.params.map(materialize))) + } + } +} diff --git a/json/circe/src/io/iohk/armadillo/json/circe/package.scala b/json/circe/src/io/iohk/armadillo/json/circe/package.scala new file mode 100644 index 0000000..e2e58ea --- /dev/null +++ b/json/circe/src/io/iohk/armadillo/json/circe/package.scala @@ -0,0 +1,3 @@ +package io.iohk.armadillo.json + +package object circe extends ArmadilloCirceJson diff --git a/json/json4s/src/io/iohk/armadillo/json/json4s/ArmadilloJson4s.scala b/json/json4s/src/io/iohk/armadillo/json/json4s/ArmadilloJson4s.scala new file mode 100644 index 0000000..de6ec50 --- /dev/null +++ b/json/json4s/src/io/iohk/armadillo/json/json4s/ArmadilloJson4s.scala @@ -0,0 +1,36 @@ +package io.iohk.armadillo.json.json4s + +import io.iohk.armadillo._ +import org.json4s.JsonAST.JValue +import org.json4s.{Extraction, Formats, JNothing, JNull} +import sttp.tapir.{DecodeResult, Schema} + +import scala.util.{Failure, Success, Try} + +trait ArmadilloJson4s { + implicit def jsonRpcCodec[H: Schema](implicit formats: Formats, manifest: Manifest[H], json4sSupport: Json4sSupport): JsonRpcCodec[H] = + new JsonRpcCodec[H] { + override type L = JValue + + override def encode(h: H): JValue = Extraction.decompose(h) + + override def schema: Schema[H] = implicitly[Schema[H]] + + override def decode(l: JValue): DecodeResult[H] = { + Try( + if (manifest.runtimeClass == classOf[Option[_]]) { + (l match { + case JNothing | JNull => None + case value => Some(value.extract(implicitly, manifest.typeArguments.head)) + }).asInstanceOf[H] + } else + l.extract[H] + ) match { + case Failure(exception) => DecodeResult.Error(l.toString, exception) + case Success(value) => DecodeResult.Value(value) + } + } + + override def show(l: JValue): String = json4sSupport.stringify(l) + } +} diff --git a/json/json4s/src/io/iohk/armadillo/json/json4s/Json4sSupport.scala b/json/json4s/src/io/iohk/armadillo/json/json4s/Json4sSupport.scala new file mode 100644 index 0000000..cfd1120 --- /dev/null +++ 
b/json/json4s/src/io/iohk/armadillo/json/json4s/Json4sSupport.scala @@ -0,0 +1,90 @@ +package io.iohk.armadillo.json.json4s + +import io.iohk.armadillo._ +import io.iohk.armadillo.server.JsonSupport +import io.iohk.armadillo.server.JsonSupport.Json +import org.json4s.JsonAST.JValue +import org.json4s._ +import sttp.tapir.DecodeResult + +import scala.util.{Failure, Success, Try} + +class Json4sSupport private (parseAsJValue: String => JValue, render: JValue => String)(implicit formats: Formats) + extends JsonSupport[JValue] { + + override def asArray(seq: Seq[JValue]): JValue = JArray(seq.toList) + + override def jsNull: JValue = JNull + + override def encodeErrorNoData(error: JsonRpcError[Unit]): JValue = { + val map = Map("code" -> error.code, "message" -> error.message) + Extraction.decompose(map) + } + + override def encodeErrorWithData(error: JsonRpcError[JValue]): JValue = { // TODO check + val map = Map("code" -> error.code, "message" -> error.message, "data" -> error.data) + Extraction.decompose(map) + } + + override def encodeResponse(response: JsonRpcResponse[JValue]): JValue = { + response match { + case success: JsonRpcSuccessResponse[JValue] => Extraction.decompose(success) + case err: JsonRpcErrorResponse[JValue] => Extraction.decompose(err) + } + } + + override def parse(string: String): DecodeResult[JsonSupport.Json[JValue]] = { + Try(parseAsJValue(string)) match { + case Failure(exception) => DecodeResult.Error(string, exception) + case Success(value) => + DecodeResult.Value(materialize(value)) + } + } + + override def stringify(raw: JValue): String = render(raw) + + override def decodeJsonRpcRequest(obj: Json.JsonObject[JValue]): DecodeResult[JsonRpcRequest[Json[JValue]]] = { + val raw = demateralize(obj) + Try(raw.extract[JsonRpcRequest[JValue]]) match { + case Failure(exception) => DecodeResult.Error(raw.toString, exception) + case Success(value) => DecodeResult.Value(value.copy(params = value.params.map(materialize))) + } + } + + override def materialize(raw: JValue): Json[JValue] = { + raw match { + case JObject(obj) => Json.JsonObject(obj) + case JArray(arr) => Json.JsonArray(arr.toVector) + case other => Json.Other(other) + } + } + + override def demateralize(json: Json[JValue]): JValue = { + json match { + case Json.JsonObject(fields) => JObject(fields) + case Json.JsonArray(values) => JArray(values.toList) + case Json.Other(raw) => raw + } + } +} + +object Json4sSupport { + def apply(parseAsJValue: String => JValue, render: JValue => String)(implicit formats: Formats): Json4sSupport = { + new Json4sSupport(parseAsJValue, render)(formats + JsonRpcIdSerializer) + } + + object JsonRpcIdSerializer + extends CustomSerializer[JsonRpcId](_ => + ( + { + case JInt(value) => JsonRpcId.IntId(value.intValue) + case JString(value) if value.toIntOption.nonEmpty => JsonRpcId.IntId(value.toInt) + case JString(value) => JsonRpcId.StringId(value) + }, + { + case JsonRpcId.IntId(v) => JInt(v) + case JsonRpcId.StringId(v) => JString(v) + } + ) + ) +} diff --git a/json/json4s/src/io/iohk/armadillo/json/json4s/package.scala b/json/json4s/src/io/iohk/armadillo/json/json4s/package.scala new file mode 100644 index 0000000..1166fd6 --- /dev/null +++ b/json/json4s/src/io/iohk/armadillo/json/json4s/package.scala @@ -0,0 +1,3 @@ +package io.iohk.armadillo.json + +package object json4s extends ArmadilloJson4s diff --git a/millw b/millw new file mode 100755 index 0000000..62e5e18 --- /dev/null +++ b/millw @@ -0,0 +1,171 @@ +#!/usr/bin/env sh + +# This is a wrapper script, that automatically 
download mill from GitHub release pages +# You can give the required mill version with --mill-version parameter +# If no version is given, it falls back to the value of DEFAULT_MILL_VERSION +# +# Project page: https://github.com/lefou/millw +# Script Version: 0.4.2 +# +# If you want to improve this script, please also contribute your changes back! +# +# Licensed under the Apache License, Version 2.0 + + +DEFAULT_MILL_VERSION=0.10.0 + +set -e + +MILL_REPO_URL="https://github.com/com-lihaoyi/mill" + +if [ -z "${CURL_CMD}" ] ; then + CURL_CMD=curl +fi + +# Explicit commandline argument takes precedence over all other methods +if [ "$1" = "--mill-version" ] ; then + shift + if [ "x$1" != "x" ] ; then + MILL_VERSION="$1" + shift + else + echo "You specified --mill-version without a version." 1>&2 + echo "Please provide a version that matches one provided on" 1>&2 + echo "${MILL_REPO_URL}/releases" 1>&2 + false + fi +fi + +# Please note, that if a MILL_VERSION is already set in the environment, +# We reuse it's value and skip searching for a value. + +# If not already set, read .mill-version file +if [ -z "${MILL_VERSION}" ] ; then + if [ -f ".mill-version" ] ; then + MILL_VERSION="$(head -n 1 .mill-version 2> /dev/null)" + fi +fi + +if [ -n "${XDG_CACHE_HOME}" ] ; then + MILL_DOWNLOAD_PATH="${XDG_CACHE_HOME}/mill/download" +else + MILL_DOWNLOAD_PATH="${HOME}/.cache/mill/download" +fi + +# If not already set, try to fetch newest from Github +if [ -z "${MILL_VERSION}" ] ; then + # TODO: try to load latest version from release page + echo "No mill version specified." 1>&2 + echo "You should provide a version via '.mill-version' file or --mill-version option." 1>&2 + + mkdir -p "${MILL_DOWNLOAD_PATH}" + LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest" 2>/dev/null || ( + # we might be on OSX or BSD which don't have -d option for touch + # but probably a -A [-][[hh]mm]SS + touch "${MILL_DOWNLOAD_PATH}/.expire_latest"; touch -A -010000 "${MILL_DOWNLOAD_PATH}/.expire_latest" + ) || ( + # in case we still failed, we retry the first touch command with the intention + # to show the (previously suppressed) error message + LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest" + ) + + # POSIX shell variant of bash's -nt operator, see https://unix.stackexchange.com/a/449744/6993 + # if [ "${MILL_DOWNLOAD_PATH}/.latest" -nt "${MILL_DOWNLOAD_PATH}/.expire_latest" ] ; then + if [ -n "$(find -L "${MILL_DOWNLOAD_PATH}/.latest" -prune -newer "${MILL_DOWNLOAD_PATH}/.expire_latest")" ]; then + # we know a current latest version + MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null) + fi + + if [ -z "${MILL_VERSION}" ] ; then + # we don't know a current latest version + echo "Retrieving latest mill version ..." 1>&2 + LANG=C ${CURL_CMD} -s -i -f -I ${MILL_REPO_URL}/releases/latest 2> /dev/null | grep --ignore-case Location: | sed s'/^.*tag\///' | tr -d '\r\n' > "${MILL_DOWNLOAD_PATH}/.latest" + MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null) + fi + + if [ -z "${MILL_VERSION}" ] ; then + # Last resort + MILL_VERSION="${DEFAULT_MILL_VERSION}" + echo "Falling back to hardcoded mill version ${MILL_VERSION}" 1>&2 + else + echo "Using mill version ${MILL_VERSION}" 1>&2 + fi +fi + +MILL="${MILL_DOWNLOAD_PATH}/${MILL_VERSION}" + +try_to_use_system_mill() { + MILL_IN_PATH="$(command -v mill || true)" + + if [ -z "${MILL_IN_PATH}" ]; then + return + fi + + UNIVERSAL_SCRIPT_MAGIC="@ 2>/dev/null # 2>nul & echo off & goto BOF" + + if ! 
head -c 128 "${MILL_IN_PATH}" | grep -qF "${UNIVERSAL_SCRIPT_MAGIC}"; then + if [ -n "${MILLW_VERBOSE}" ]; then + echo "Could not determine mill version of ${MILL_IN_PATH}, as it does not start with the universal script magic2" 1>&2 + fi + return + fi + + # Roughly the size of the universal script. + MILL_VERSION_SEARCH_RANGE="2403" + MILL_IN_PATH_VERSION=$(head -c "${MILL_VERSION_SEARCH_RANGE}" "${MILL_IN_PATH}" |\ + sed -n 's/^.*-DMILL_VERSION=\([^\s]*\) .*$/\1/p' |\ + head -n 1) + + if [ -z "${MILL_IN_PATH_VERSION}" ]; then + echo "Could not determine mill version, even though ${MILL_IN_PATH} has the universal script magic" 1>&2 + return + fi + + if [ "${MILL_IN_PATH_VERSION}" = "${MILL_VERSION}" ]; then + MILL="${MILL_IN_PATH}" + fi +} +try_to_use_system_mill + +# If not already downloaded, download it +if [ ! -s "${MILL}" ] ; then + + # support old non-XDG download dir + MILL_OLD_DOWNLOAD_PATH="${HOME}/.mill/download" + OLD_MILL="${MILL_OLD_DOWNLOAD_PATH}/${MILL_VERSION}" + if [ -x "${OLD_MILL}" ] ; then + MILL="${OLD_MILL}" + else + VERSION_PREFIX="$(echo $MILL_VERSION | cut -b -4)" + case $VERSION_PREFIX in + 0.0. | 0.1. | 0.2. | 0.3. | 0.4. ) + DOWNLOAD_SUFFIX="" + ;; + *) + DOWNLOAD_SUFFIX="-assembly" + ;; + esac + unset VERSION_PREFIX + + DOWNLOAD_FILE=$(mktemp mill.XXXXXX) + # TODO: handle command not found + echo "Downloading mill ${MILL_VERSION} from ${MILL_REPO_URL}/releases ..." 1>&2 + MILL_VERSION_TAG=$(echo $MILL_VERSION | sed -E 's/([^-]+)(-M[0-9]+)?(-.*)?/\1\2/') + ${CURL_CMD} -f -L -o "${DOWNLOAD_FILE}" "${MILL_REPO_URL}/releases/download/${MILL_VERSION_TAG}/${MILL_VERSION}${DOWNLOAD_SUFFIX}" + chmod +x "${DOWNLOAD_FILE}" + mkdir -p "${MILL_DOWNLOAD_PATH}" + mv "${DOWNLOAD_FILE}" "${MILL}" + + unset DOWNLOAD_FILE + unset DOWNLOAD_SUFFIX + fi +fi + +unset MILL_DOWNLOAD_PATH +unset MILL_OLD_DOWNLOAD_PATH +unset OLD_MILL +unset MILL_VERSION +unset MILL_VERSION_TAG +unset MILL_REPO_URL + +exec "${MILL}" "$@" diff --git a/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/ArmadilloOpenRpcCirce.scala b/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/ArmadilloOpenRpcCirce.scala new file mode 100644 index 0000000..20ae3e3 --- /dev/null +++ b/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/ArmadilloOpenRpcCirce.scala @@ -0,0 +1,25 @@ +package io.iohk.armadillo.openrpc.circe + +import io.circe.generic.semiauto._ +import io.circe.{Encoder, Json} +import io.iohk.armadillo.openrpc.model._ +import sttp.apispec.internal.JsonSchemaCirceEncoders + +trait ArmadilloOpenRpcCirce extends JsonSchemaCirceEncoders { + // note: these are strict val-s, order matters! 
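+  // Being strict vals they are initialised in declaration order: the derived encoders further down
+  // capture these leaf encoders (params, results, errors, tags) when they are built, so the leaves
+  // must be defined first.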
+ implicit val paramEncoder: Encoder[OpenRpcParam] = deriveEncoder[OpenRpcParam] + implicit val resultEncoder: Encoder[OpenRpcResult] = deriveEncoder[OpenRpcResult] + implicit val errorEncoder: Encoder[OpenRpcError] = deriveEncoder[OpenRpcError] + + implicit val extDescriptionEncoder: Encoder[OpenRpcExternalDocs] = deriveEncoder[OpenRpcExternalDocs] + implicit val tagsEncoder: Encoder[OpenRpcMethodTag] = deriveEncoder[OpenRpcMethodTag] + + implicit val methodEncoder: Encoder[OpenRpcMethod] = deriveEncoder[OpenRpcMethod] + implicit val infoEncoder: Encoder[OpenRpcInfo] = deriveEncoder[OpenRpcInfo] + implicit val componentsEncoder: Encoder[OpenRpcComponents] = deriveEncoder[OpenRpcComponents] + implicit val documentEncoder: Encoder[OpenRpcDocument] = deriveEncoder[OpenRpcDocument] + + implicit def encodeRequiredList[T: Encoder]: Encoder[RequiredList[T]] = { case requiredList: RequiredList[T] => + Json.arr(requiredList.wrapped.map(i => implicitly[Encoder[T]].apply(i)): _*) + } +} diff --git a/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/package.scala b/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/package.scala new file mode 100644 index 0000000..2a5a230 --- /dev/null +++ b/openrpc/circe/src/io/iohk/armadillo/openrpc/circe/package.scala @@ -0,0 +1,7 @@ +package io.iohk.armadillo.openrpc + +import sttp.apispec.AnySchema + +package object circe extends ArmadilloOpenRpcCirce { + override val anyObjectEncoding: AnySchema.Encoding = AnySchema.Encoding.Boolean +} diff --git a/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/ArmadilloOpenRpcCirceYaml.scala b/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/ArmadilloOpenRpcCirceYaml.scala new file mode 100644 index 0000000..9379be5 --- /dev/null +++ b/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/ArmadilloOpenRpcCirceYaml.scala @@ -0,0 +1,12 @@ +package io.iohk.armadillo.openrpc.circe.yaml + +import io.circe.syntax._ +import io.circe.yaml.Printer +import io.iohk.armadillo.openrpc.circe._ +import io.iohk.armadillo.openrpc.model.OpenRpcDocument + +trait ArmadilloOpenRpcCirceYaml { + implicit class RichOpenRpcDocument(document: OpenRpcDocument) { + def toYaml: String = Printer(dropNullKeys = true, preserveOrder = true).pretty(document.asJson) + } +} diff --git a/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/package.scala b/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/package.scala new file mode 100644 index 0000000..56727a8 --- /dev/null +++ b/openrpc/circeYaml/src/io/iohk/armadillo/openrpc/circe/yaml/package.scala @@ -0,0 +1,3 @@ +package io.iohk.armadillo.openrpc.circe + +package object yaml extends ArmadilloOpenRpcCirceYaml diff --git a/openrpc/model/src/io/iohk/armadillo/openrpc/model/OpenRpcDocument.scala b/openrpc/model/src/io/iohk/armadillo/openrpc/model/OpenRpcDocument.scala new file mode 100644 index 0000000..6e8f813 --- /dev/null +++ b/openrpc/model/src/io/iohk/armadillo/openrpc/model/OpenRpcDocument.scala @@ -0,0 +1,54 @@ +package io.iohk.armadillo.openrpc.model + +import sttp.apispec.{ReferenceOr, Schema} + +import scala.collection.immutable.ListMap + +case class RequiredList[T](wrapped: List[T]) + +object RequiredList { + def empty[T]: RequiredList[T] = RequiredList(List.empty[T]) +} + +case class OpenRpcDocument( + openrpc: String = "1.2.1", + info: OpenRpcInfo, + methods: RequiredList[OpenRpcMethod], + components: Option[OpenRpcComponents] +) + +case class OpenRpcComponents(contentDescriptors: List[Unit], schemas: ListMap[String, ReferenceOr[Schema]]) + +case class 
OpenRpcInfo(version: String, title: String) + +case class OpenRpcMethod( + name: String, + tags: List[OpenRpcMethodTag] = List.empty, + summary: Option[String] = None, + description: Option[String] = None, + params: RequiredList[OpenRpcParam] = RequiredList.empty, + errors: List[OpenRpcError] = List.empty, + result: OpenRpcResult +) + +case class OpenRpcMethodTag( + name: String, + summary: Option[String] = None, + description: Option[String] = None, + externalDocs: Option[OpenRpcExternalDocs] = None +) + +case class OpenRpcExternalDocs(url: String, description: Option[String]) + +case class OpenRpcParam( + name: String, + summary: Option[String] = None, + description: Option[String] = None, + required: Boolean = false, + schema: ReferenceOr[Schema], + deprecated: Option[Boolean] +) + +case class OpenRpcResult(name: String, summary: Option[String] = None, description: Option[String] = None, schema: ReferenceOr[Schema]) + +case class OpenRpcError(code: Int, message: String, data: Option[ReferenceOr[Schema]]) diff --git a/openrpc/src/io/iohk/armadillo/openrpc/EndpointToOpenRpcMethods.scala b/openrpc/src/io/iohk/armadillo/openrpc/EndpointToOpenRpcMethods.scala new file mode 100644 index 0000000..2feb4c9 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/EndpointToOpenRpcMethods.scala @@ -0,0 +1,96 @@ +package io.iohk.armadillo.openrpc + +import io.iohk.armadillo.openrpc.EndpointToOpenRpcMethods.EmptyResult +import io.iohk.armadillo.openrpc.model._ +import io.iohk.armadillo.{AnyEndpoint, JsonRpcCodec, JsonRpcEndpoint, JsonRpcErrorOutput, JsonRpcIO, JsonRpcInput} +import sttp.apispec.Schema +import sttp.tapir.SchemaType + +class EndpointToOpenRpcMethods(schemas: Schemas) { + + def methods(es: List[AnyEndpoint]): List[OpenRpcMethod] = { + es.map(convertEndpoint) + } + + private def convertEndpoint(endpoint: JsonRpcEndpoint[_, _, _]): OpenRpcMethod = { + val result = convertResult(endpoint) + OpenRpcMethod( + name = endpoint.methodName.asString, + params = RequiredList(convertParams(endpoint.input)), + result = result, + summary = endpoint.info.summary, + description = endpoint.info.description, + tags = endpoint.info.tags + .map(t => + OpenRpcMethodTag( + name = t.name, + summary = t.summary, + description = t.description, + externalDocs = t.externalDocs.map(ed => OpenRpcExternalDocs(url = ed.url, description = ed.description)) + ) + ) + .toList, + errors = convertError(endpoint.error) + ) + } + + private def convertParams(jsonRpcInput: JsonRpcInput[_]): List[OpenRpcParam] = { + jsonRpcInput match { + case o: JsonRpcIO.Single[_] => List(convertParam(o)) + case _: JsonRpcIO.Empty[_] => List.empty + case JsonRpcInput.Pair(left, right) => + convertParams(left) ++ convertParams(right) + } + } + + private def convertParam[I](jsonRpcInput: JsonRpcIO.Single[I]) = { + val schema = schemas(jsonRpcInput.codec, replaceOptionWithCoproduct = false) + OpenRpcParam( + name = jsonRpcInput.name, + schema = schema.map(updateSchema(_, jsonRpcInput.codec, jsonRpcInput.info.examples)), + required = jsonRpcInput.codec.schema.schemaType match { + case SchemaType.SOption(_) => false + case _ => true + }, + deprecated = jsonRpcInput.info.deprecated, + summary = jsonRpcInput.info.summary, + description = jsonRpcInput.info.description + ) + } + + private def convertResult[O](endpoint: JsonRpcEndpoint[_, _, O]) = { + endpoint.output match { + case _: JsonRpcIO.Empty[O] => EmptyResult + case single: JsonRpcIO.Single[O] => + val schema = schemas(single.codec, replaceOptionWithCoproduct = true) + OpenRpcResult( + name 
= single.name, + schema = schema.map(updateSchema(_, single.codec, single.info.examples)), + summary = single.info.summary, + description = single.info.description + ) + } + } + + private def updateSchema[T](schema: Schema, codec: JsonRpcCodec[T], examples: Set[T]) = { + schema.copy(example = exampleValue(codec, examples)) + } + + private def convertError(errorOutput: JsonRpcErrorOutput[_]): List[OpenRpcError] = { + errorOutput match { + case single: JsonRpcErrorOutput.Fixed[_] => + List(OpenRpcError(single.code, single.message, None)) + case single: JsonRpcErrorOutput.FixedWithData[_] => + val schema = schemas(single.codec, replaceOptionWithCoproduct = true) + List(OpenRpcError(single.code, single.message, Some(schema))) + case JsonRpcErrorOutput.OneOf(variants, _) => + variants.flatMap(v => convertError(v.output)) + case _ => List.empty + } + } +} +object EndpointToOpenRpcMethods { + private val EmptyResult = + OpenRpcResult(name = "empty result", schema = Right(Schema())) // TODO rename to no-response as it carries the intent clearer + +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/NameToSchemaReference.scala b/openrpc/src/io/iohk/armadillo/openrpc/NameToSchemaReference.scala new file mode 100644 index 0000000..5a3cc19 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/NameToSchemaReference.scala @@ -0,0 +1,16 @@ +package io.iohk.armadillo.openrpc + +import sttp.apispec.Reference +import sttp.tapir.{Schema => TSchema} + +class NameToSchemaReference(nameToKey: Map[TSchema.SName, String]) { + def map(name: TSchema.SName): Reference = { + nameToKey.get(name) match { + case Some(key) => + Reference.to("#/components/schemas/", key) + case None => + // no reference to internal model found. assuming external reference + Reference(name.fullName) + } + } +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/OpenRpcDocsInterpreter.scala b/openrpc/src/io/iohk/armadillo/openrpc/OpenRpcDocsInterpreter.scala new file mode 100644 index 0000000..69249e2 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/OpenRpcDocsInterpreter.scala @@ -0,0 +1,30 @@ +package io.iohk.armadillo.openrpc + +import io.iohk.armadillo.AnyEndpoint +import io.iohk.armadillo.openrpc.model._ +import sttp.apispec.ExampleValue +import sttp.tapir.Schema + +case class OpenRpcDocsInterpreter(markOptionsAsNullable: Boolean = true) { + def toOpenRpc(info: OpenRpcInfo, endpoints: List[AnyEndpoint]): OpenRpcDocument = { + val sortedEndpoints = endpoints.sorted + + val toNamedSchemas = new ToNamedSchemas + val (keyToSchema, schemas) = + new SchemaForEndpoints(sortedEndpoints, toNamedSchemas, markOptionsAsNullable).calculate() + + val methodCreator = new EndpointToOpenRpcMethods(schemas) + + OpenRpcDocument( + info = info, + methods = RequiredList(methodCreator.methods(sortedEndpoints)), + components = if (keyToSchema.nonEmpty) Some(OpenRpcComponents(List.empty, keyToSchema.sortByKey)) else None + ) + } + +} + +object OpenRpcDocsInterpreter { + + type NamedSchema = (Schema.SName, Schema[_], Option[ExampleValue]) +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/SchemaForEndpoints.scala b/openrpc/src/io/iohk/armadillo/openrpc/SchemaForEndpoints.scala new file mode 100644 index 0000000..b03953c --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/SchemaForEndpoints.scala @@ -0,0 +1,81 @@ +package io.iohk.armadillo.openrpc + +import io.iohk.armadillo.openrpc.OpenRpcDocsInterpreter.NamedSchema +import io.iohk.armadillo.{AnyEndpoint, JsonRpcErrorOutput, JsonRpcIO, JsonRpcInput, JsonRpcOutput} +import 
sttp.apispec.{ReferenceOr, Schema => ASchema, SchemaType => _} +import sttp.tapir.Schema.SName + +import scala.collection.immutable.ListMap + +class SchemaForEndpoints(es: List[AnyEndpoint], toNamedSchemas: ToNamedSchemas, markOptionsAsNullable: Boolean) { + + private val defaultSchemaName: SName => String = info => { + val shortName = info.fullName.split('.').last + (shortName +: info.typeParameterShortNames).mkString("_") + } + + def calculate(): (ListMap[String, ReferenceOr[ASchema]], Schemas) = { + val sObjects = ToNamedSchemas.unique(es.flatMap(e => forInput(e.input) ++ forOutput(e.output) ++ forErrorOutput(e.error))) + val infoToKey = calculateUniqueKeys(sObjects.map(_._1), defaultSchemaName) + val objectToSchemaReference = new NameToSchemaReference(infoToKey) + val schemaConverter = new SchemaToOpenRpcSchema(objectToSchemaReference, markOptionsAsNullable, infoToKey) + val schemas = new Schemas(schemaConverter, objectToSchemaReference, markOptionsAsNullable) + val infosToSchema = sObjects.map(td => (td._1, schemaConverter(td._2, td._3))).toListMap + val schemaKeys = infosToSchema.map { case (k, v) => k -> ((infoToKey(k), v)) } + (schemaKeys.values.toListMap, schemas) + } + + private def forInput(input: JsonRpcInput[_]): List[NamedSchema] = { + input match { + case io: JsonRpcIO[_] => forIO(io, replaceOptionWithCoproduct = false) + case JsonRpcInput.Pair(left, right) => forInput(left) ++ forInput(right) + } + } + + private def forIO(io: JsonRpcIO[_], replaceOptionWithCoproduct: Boolean): List[NamedSchema] = { + io match { + case JsonRpcIO.Empty() => List.empty + case JsonRpcIO.Single(codec, info, _) => toNamedSchemas(codec, Some(info), replaceOptionWithCoproduct) + } + } + + private def forOutput(output: JsonRpcOutput[_]): List[NamedSchema] = { + output match { + case io: JsonRpcIO[_] => forIO(io, replaceOptionWithCoproduct = true) + } + } + + private def forErrorOutput(output: JsonRpcErrorOutput[_]): List[NamedSchema] = { + output match { + case JsonRpcErrorOutput.FixedWithData(_, _, codec) => toNamedSchemas(codec, None, replaceOptionWithCoproduct = true) + case JsonRpcErrorOutput.OneOf(variants, _) => variants.flatMap(v => forErrorOutput(v.output)) + case _ => List.empty + } + } + + private[openrpc] def calculateUniqueKeys[T](ts: Iterable[T], toName: T => String): Map[T, String] = { + case class Assigment(nameToT: Map[String, T], tToKey: Map[T, String]) + ts + .foldLeft(Assigment(Map.empty, Map.empty)) { case (Assigment(nameToT, tToKey), t) => + val key = uniqueName(toName(t), n => !nameToT.contains(n) || nameToT.get(n).contains(t)) + + Assigment( + nameToT + (key -> t), + tToKey + (t -> key) + ) + } + .tToKey + } + + // scalafix:off DisableSyntax.var + private[openrpc] def uniqueName(base: String, isUnique: String => Boolean): String = { + var i = 0 + var result = base + while (!isUnique(result)) { + i += 1 + result = base + i + } + result + } + // scalafix:on +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/SchemaToOpenRpcSchema.scala b/openrpc/src/io/iohk/armadillo/openrpc/SchemaToOpenRpcSchema.scala new file mode 100644 index 0000000..390f188 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/SchemaToOpenRpcSchema.scala @@ -0,0 +1,135 @@ +package io.iohk.armadillo.openrpc + +import sttp.apispec.{ + ArraySchemaType, + BasicSchemaType, + Discriminator, + ExampleSingleValue, + ExampleValue, + Pattern, + Reference, + ReferenceOr, + Schema => ASchema, + SchemaFormat, + SchemaType +} +import sttp.tapir.{Schema => TSchema, SchemaType => TSchemaType, Validator} + +class 
SchemaToOpenRpcSchema( + nameToSchemaReference: NameToSchemaReference, + markOptionsAsNullable: Boolean, + infoToKey: Map[TSchema.SName, String] +) { + def apply[T](schema: TSchema[T], examples: Option[ExampleValue] = None, isOptionElement: Boolean = false): ReferenceOr[ASchema] = { + val nullable = markOptionsAsNullable && isOptionElement + val result = schema.schemaType match { + case TSchemaType.SInteger() => Right(ASchema(SchemaType.Integer)) + case TSchemaType.SNumber() => Right(ASchema(SchemaType.Number)) + case TSchemaType.SBoolean() => Right(ASchema(SchemaType.Boolean)) + case TSchemaType.SString() => Right(ASchema(SchemaType.String)) + case p @ TSchemaType.SProduct(fields) => + Right( + ASchema(SchemaType.Object).copy( + required = p.required.map(_.encodedName), + properties = fields.map { f => + f.schema match { + case TSchema(_, Some(name), _, _, _, _, _, _, _, _, _) => f.name.encodedName -> Left(nameToSchemaReference.map(name)) + case schema => f.name.encodedName -> apply(schema) + } + }.toListMap + ) + ) + case TSchemaType.SArray(TSchema(_, Some(name), _, _, _, _, _, _, _, _, _)) => + Right(ASchema(SchemaType.Array).copy(items = Some(Left(nameToSchemaReference.map(name))))) + case TSchemaType.SArray(el) => Right(ASchema(SchemaType.Array).copy(items = Some(apply(el)))) + case TSchemaType.SOption(TSchema(_, Some(name), _, _, _, _, _, _, _, _, _)) => Left(nameToSchemaReference.map(name)) + case TSchemaType.SOption(el) => apply(el, examples, isOptionElement = true) + case TSchemaType.SBinary() => Right(ASchema(SchemaType.String).copy(format = SchemaFormat.Binary)) + case TSchemaType.SDate() => Right(ASchema(SchemaType.String).copy(format = SchemaFormat.Date)) + case TSchemaType.SDateTime() => Right(ASchema(SchemaType.String).copy(format = SchemaFormat.DateTime)) + case TSchemaType.SRef(fullName) => Left(nameToSchemaReference.map(fullName)) + case TSchemaType.SCoproduct(schemas, d) => + Right( + ASchema + .apply( + schemas + .map { + case TSchema(_, Some(name), _, _, _, _, _, _, _, _, _) => Left(nameToSchemaReference.map(name)) + case t => apply(t) + } + .sortBy { + case Left(Reference(ref, _, _)) => ref + case Right(schema) => + schema.`type` + .map { + case ArraySchemaType(value) => value.sortBy(schemaType => schemaType.value).mkString + case schemaType: BasicSchemaType => schemaType.value + } + .getOrElse("") + schema.toString + }, + d.map(tDiscriminatorToADiscriminator) + ) + ) + case TSchemaType.SOpenProduct(_, valueSchema) => + Right( + ASchema(SchemaType.Object).copy( + required = List.empty, + additionalProperties = Some(valueSchema.name match { + case Some(name) => Left(nameToSchemaReference.map(name)) + case _ => apply(valueSchema) + }) + ) + ) + } + result + .map(s => if (nullable) s.copy(nullable = Some(true)) else s) + .map(addMetadata(_, schema, examples)) + .map(addValidatorInfo(_, schema.validator)) + } + + private def addMetadata(oschema: ASchema, tschema: TSchema[_], examples: Option[ExampleValue]): ASchema = { + oschema.copy( + title = tschema.name.flatMap(infoToKey.get), + description = tschema.description.orElse(oschema.description), + default = tschema.default.flatMap { case (_, raw) => raw.flatMap(r => exampleValue(tschema, r)) }.orElse(oschema.default), + example = examples.orElse(tschema.encodedExample.flatMap(exampleValue(tschema, _))).orElse(oschema.example), + format = tschema.format.orElse(oschema.format), + deprecated = (if (tschema.deprecated) Some(true) else None).orElse(oschema.deprecated) + ) + } + + private def 
tDiscriminatorToADiscriminator(discriminator: TSchemaType.SDiscriminator): Discriminator = { + val schemas = Some( + discriminator.mapping.map { case (k, TSchemaType.SRef(fullName)) => + k -> nameToSchemaReference.map(fullName).$ref + }.toListMap + ) + Discriminator(discriminator.name.encodedName, schemas) + } + + private def addValidatorInfo(schema: ASchema, validator: Validator[_]): ASchema = { + validator match { + case m @ Validator.Min(value, exclusive) => + val minimum = BigDecimal(m.valueIsNumeric.toDouble(value)) + schema.copy(minimum = Some(minimum), exclusiveMinimum = Some(exclusive)) + case m @ Validator.Max(value, exclusive) => + val maximum = BigDecimal(m.valueIsNumeric.toDouble(value)) + schema.copy(maximum = Some(maximum), exclusiveMaximum = Some(exclusive)) + case Validator.Enumeration(possibleValues, encode, _) => + val encodedEnumValues = possibleValues.map(v => ExampleSingleValue(encode.flatMap(_(v)).getOrElse(v.toString))) + schema.copy(`enum` = Some(encodedEnumValues)) + case Validator.Pattern(value) => schema.copy(pattern = Some(Pattern(value))) + case Validator.MinLength(value) => schema.copy(minLength = Some(value)) + case Validator.MaxLength(value) => schema.copy(maxLength = Some(value)) + case Validator.MinSize(value) => schema.copy(minItems = Some(value)) + case Validator.MaxSize(value) => schema.copy(maxItems = Some(value)) + case Validator.All(validators) => validators.foldLeft(schema)(addValidatorInfo) + case Validator.Custom(_, showMessage) => + val newDescription = Some(List(schema.description, showMessage).flatten.mkString("\n")).filter(_.nonEmpty) + schema.copy(description = newDescription) + case Validator.Mapped(wrapped, _) => addValidatorInfo(schema, wrapped) + case Validator.Any(_) => schema + } + } + +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/Schemas.scala b/openrpc/src/io/iohk/armadillo/openrpc/Schemas.scala new file mode 100644 index 0000000..055e803 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/Schemas.scala @@ -0,0 +1,47 @@ +package io.iohk.armadillo.openrpc + +import io.iohk.armadillo.JsonRpcCodec +import sttp.apispec.{ReferenceOr, Schema => ASchema, SchemaType} +import sttp.tapir.{Schema => TSchema, SchemaType => TSchemaType} + +import scala.annotation.tailrec + +/** Converts a tapir schema to an OpenAPI/AsyncAPI reference (if the schema is named), or to the appropriate schema. 
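+  * If the schema is an optional value and `replaceOptionWithCoproduct` is set, a synthetic
+  * `<name>OrNull` coproduct of the element schema and a `Null` placeholder is produced instead.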
*/ +class Schemas( + tschemaToASchema: SchemaToOpenRpcSchema, + nameToSchemaReference: NameToSchemaReference, + markOptionsAsNullable: Boolean +) { + def apply[T](codec: JsonRpcCodec[T], replaceOptionWithCoproduct: Boolean): ReferenceOr[ASchema] = + apply(codec.schema, replaceOptionWithCoproduct) + + @tailrec + private def apply(schema: TSchema[_], replaceOptionWithCoproduct: Boolean): ReferenceOr[ASchema] = { + schema.name match { + case Some(name) => Left(nameToSchemaReference.map(name)) + case None => + schema.schemaType match { + case TSchemaType.SArray(TSchema(_, Some(name), isOptional, _, _, _, _, _, _, _, _)) => + Right(ASchema(SchemaType.Array).copy(items = Some(Left(nameToSchemaReference.map(name))))) + .map(s => if (isOptional && markOptionsAsNullable) s.copy(nullable = Some(true)) else s) + case TSchemaType.SOption(ts) => + if (replaceOptionWithCoproduct) { + val synthesised = TSchema( // TODO deduplicate (ToNamedSchemas) + TSchemaType.SCoproduct[Any]( + subtypes = List( + ts, + TSchema(schemaType = TSchemaType.SProduct(List.empty), name = Some(TSchema.SName("Null")), description = Some("null")) + ), + discriminator = None + )(_ => None), + name = ts.name.map(sn => sn.copy(fullName = s"${sn.fullName}OrNull")) + ) + apply(synthesised, replaceOptionWithCoproduct) + } else { + apply(ts, replaceOptionWithCoproduct) + } + case _ => tschemaToASchema(schema) + } + } + } +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/ToNamedSchemas.scala b/openrpc/src/io/iohk/armadillo/openrpc/ToNamedSchemas.scala new file mode 100644 index 0000000..7626602 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/ToNamedSchemas.scala @@ -0,0 +1,81 @@ +package io.iohk.armadillo.openrpc + +import io.iohk.armadillo.openrpc.OpenRpcDocsInterpreter.NamedSchema +import io.iohk.armadillo.{JsonRpcCodec, JsonRpcIoInfo} +import sttp.apispec.ExampleValue +import sttp.tapir.{Schema => TSchema, SchemaType => TSchemaType} + +import scala.collection.mutable.ListBuffer + +class ToNamedSchemas { + def apply[T](codec: JsonRpcCodec[T], maybeInfo: Option[JsonRpcIoInfo[T]], replaceOptionWithCoproduct: Boolean): List[NamedSchema] = { + val schema = if (replaceOptionWithCoproduct) { + val synthesized = codec.schema match { + case t @ TSchema(o @ TSchemaType.SOption(element), _, _, _, _, _, _, _, _, _, _) => + val element1 = propagateMetadataForOption(t, o).element + TSchema( // TODO deduplicate (Schemas) + TSchemaType.SCoproduct[Any]( + subtypes = List( + element1, + TSchema(schemaType = TSchemaType.SProduct(List.empty), name = Some(TSchema.SName("Null")), description = Some("null")) + ), + discriminator = None + )(_ => None), + name = element.name.map(sn => sn.copy(fullName = s"${sn.fullName}OrNull")) + ) + case other => other + } + synthesized + } else { + codec.schema + } + + val examples: Option[ExampleValue] = maybeInfo.flatMap(info => exampleValue(codec, info.examples)) + + apply(schema, examples) + } + + private def apply(schema: TSchema[_], examples: Option[ExampleValue] = None): List[NamedSchema] = { + val thisSchema: List[NamedSchema] = schema.name match { + case Some(name) => List((name, schema, examples)) + case None => Nil + } + val nestedSchemas = schema match { + case TSchema(TSchemaType.SArray(o), _, _, _, _, _, _, _, _, _, _) => apply(o) + case t @ TSchema(o: TSchemaType.SOption[_, _], _, _, _, _, _, _, _, _, _, _) => + // #1168: if there's an optional field which is an object, with metadata defined (such as description), this + // needs to be propagated to the target object, so that it isn't omitted. 
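+        // propagateMetadataForOption copies the description, format, deprecation flag and
+        // encoded example from the Option schema onto its element before it is collected.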
+ apply(propagateMetadataForOption(t, o).element, examples) + case TSchema(st: TSchemaType.SProduct[_], _, _, _, _, _, _, _, _, _, _) => productSchemas(st) + case TSchema(st: TSchemaType.SCoproduct[_], _, _, _, _, _, _, _, _, _, _) => coproductSchemas(st) + case TSchema(st: TSchemaType.SOpenProduct[_, _], _, _, _, _, _, _, _, _, _, _) => apply(st.valueSchema) + case _ => List.empty + } + + thisSchema ++ nestedSchemas + } + + private def productSchemas[T](st: TSchemaType.SProduct[T]): List[NamedSchema] = st.fields.flatMap(a => apply(a.schema, None)) + + private def coproductSchemas[T](st: TSchemaType.SCoproduct[T]): List[NamedSchema] = st.subtypes.flatMap(subSchema => apply(subSchema)) +} + +object ToNamedSchemas { + + /** Keeps only the first object data for each `SName`. Objects are first sorted in order to make sure that if multiple `SName` exists, and + * if an example isn't defined for all of them, the first instance will have an example. In case of recursive objects, the first one is + * the most complete as it contains the built-up structure, unlike subsequent ones, which only represent leaves (tapir#354). + */ + def unique(objs: Iterable[NamedSchema]): Iterable[NamedSchema] = { + val sortedObj = objs.toList.sortBy { case (name, _, maybeExample) => (name.fullName, maybeExample.isEmpty) } + val seen: collection.mutable.Set[TSchema.SName] = collection.mutable.Set() + val result: ListBuffer[NamedSchema] = ListBuffer() + sortedObj.foreach { obj => + if (!seen.contains(obj._1)) { + seen.add(obj._1) + result += obj + } + } + result.toList + } +} diff --git a/openrpc/src/io/iohk/armadillo/openrpc/package.scala b/openrpc/src/io/iohk/armadillo/openrpc/package.scala new file mode 100644 index 0000000..7235ed3 --- /dev/null +++ b/openrpc/src/io/iohk/armadillo/openrpc/package.scala @@ -0,0 +1,70 @@ +package io.iohk.armadillo + +import sttp.apispec.{ExampleMultipleValue, ExampleSingleValue, ExampleValue} +import sttp.tapir.{Codec, Schema => TSchema, SchemaType => TSchemaType} + +import scala.collection.immutable + +package object openrpc { + implicit class SortListMap[K, V](m: immutable.ListMap[K, V]) { + def sortByKey(implicit ko: Ordering[K]): immutable.ListMap[K, V] = sortBy(_._1) + def sortBy[B: Ordering](f: ((K, V)) => B): immutable.ListMap[K, V] = { + m.toList.sortBy(f).toListMap + } + } + + implicit class IterableToListMap[A](xs: Iterable[A]) { + def toListMap[T, U](implicit ev: A <:< (T, U)): immutable.ListMap[T, U] = { + val b = immutable.ListMap.newBuilder[T, U] + for (x <- xs) + b += x + + b.result() + } + } + + private[openrpc] def propagateMetadataForOption[T, E](schema: TSchema[T], opt: TSchemaType.SOption[T, E]): TSchemaType.SOption[T, E] = { + opt.copy( + element = opt.element.copy( + description = schema.description.orElse(opt.element.description), + format = schema.format.orElse(opt.element.format), + deprecated = schema.deprecated || opt.element.deprecated, + encodedExample = schema.encodedExample.orElse(opt.element.encodedExample) + ) + )(opt.toOption) + } + private def rawToString[T](v: Any): String = v.toString + private[openrpc] def exampleValue[T](v: String): ExampleValue = ExampleSingleValue(v) + private[openrpc] def exampleValue[T](codec: Codec[_, T, _], e: T): Option[ExampleValue] = exampleValue(codec.schema, codec.encode(e)) + private[openrpc] def exampleValue[T](schema: TSchema[_], raw: Any): Option[ExampleValue] = { + (raw, schema.schemaType) match { + case (it: Iterable[_], TSchemaType.SArray(_)) => Some(ExampleMultipleValue(it.map(rawToString).toList)) + case (it: 
Iterable[_], _) => it.headOption.map(v => ExampleSingleValue(rawToString(v))) + case (it: Option[_], _) => it.map(v => ExampleSingleValue(rawToString(v))) + case (v, _) => Some(ExampleSingleValue(rawToString(v))) + } + } + + private[openrpc] def exampleValue[T](jsonRpcCodec: JsonRpcCodec[T], examples: Set[T]): Option[ExampleValue] = { + val filtered = if (jsonRpcCodec.schema.isOptional) { + examples + .collect { case Some(t) => + Some(t) + } + .asInstanceOf[Set[T]] + } else { + examples + } + if (filtered.isEmpty) { + None + } else { + Some( + ExampleMultipleValue( + filtered.map { example => + jsonRpcCodec.show(jsonRpcCodec.encode(example)) + }.toList + ) + ) + } + } +} diff --git a/openrpc/test/resources/array_of_recursive_optional_result.yaml b/openrpc/test/resources/array_of_recursive_optional_result.yaml new file mode 100644 index 0000000..b24769c --- /dev/null +++ b/openrpc/test/resources/array_of_recursive_optional_result.yaml @@ -0,0 +1,21 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: [] + result: + name: p1 + schema: + type: array + items: + $ref: '#/components/schemas/F3' +components: + schemas: + F3: + title: F3 + type: object + properties: + data: + $ref: '#/components/schemas/F3' \ No newline at end of file diff --git a/openrpc/test/resources/basic.json b/openrpc/test/resources/basic.json new file mode 100644 index 0000000..46504a6 --- /dev/null +++ b/openrpc/test/resources/basic.json @@ -0,0 +1,28 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "hello", + "params": [ + { + "name": "param1", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } + } + ], + "result": { + "name": "response", + "schema": { + "type": "string" + } + } + } + ] +} \ No newline at end of file diff --git a/openrpc/test/resources/basic.yaml b/openrpc/test/resources/basic.yaml new file mode 100644 index 0000000..b57fde8 --- /dev/null +++ b/openrpc/test/resources/basic.yaml @@ -0,0 +1,16 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + result: + name: response + schema: + type: string diff --git a/openrpc/test/resources/basic_with_multiple_examples.json b/openrpc/test/resources/basic_with_multiple_examples.json new file mode 100644 index 0000000..78a3fea --- /dev/null +++ b/openrpc/test/resources/basic_with_multiple_examples.json @@ -0,0 +1,46 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "hello", + "params": [ + { + "name": "param1", + "required": true, + "schema": { + "type": "integer", + "format": "int32", + "examples": [ + 42, + 43 + ] + } + }, + { + "name": "param2", + "required": false, + "schema": { + "type": "string", + "examples": [ + "test" + ] + } + } + ], + "result": { + "name": "response", + "schema": { + "type": "string", + "examples": [ + "ok", + "ko" + ] + } + } + } + ] +} \ No newline at end of file diff --git a/openrpc/test/resources/basic_with_single_example.json b/openrpc/test/resources/basic_with_single_example.json new file mode 100644 index 0000000..6f8770f --- /dev/null +++ b/openrpc/test/resources/basic_with_single_example.json @@ -0,0 +1,44 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "hello", + "params": [ + { + "name": "param1", + 
"required": true, + "schema": { + "type": "integer", + "format": "int32", + "examples": [ + 42 + ] + } + }, + { + "name": "param2", + "required": false, + "schema": { + "type": "string", + "examples": [ + "test" + ] + } + } + ], + "result": { + "name": "response", + "schema": { + "type": "string", + "examples": [ + "ok" + ] + } + } + } + ] +} \ No newline at end of file diff --git a/openrpc/test/resources/custom_encoder.json b/openrpc/test/resources/custom_encoder.json new file mode 100644 index 0000000..5fe6f3a --- /dev/null +++ b/openrpc/test/resources/custom_encoder.json @@ -0,0 +1,61 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "createHuman", + "params": [ + { + "name": "human", + "required": true, + "schema": { + "$ref": "#/components/schemas/Human" + } + }, + { + "name": "data", + "required": false, + "schema": { + "type": "string", + "format": "binary", + "examples": [ + "data: some_data" + ] + } + } + ], + "result": { + "name": "result", + "schema": { + "type": "boolean" + } + } + } + ], + "components": { + "schemas": { + "Human": { + "title": "Human", + "required": [ + "name", + "nickname" + ], + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "nickname": { + "type": "string" + } + }, + "examples": [ + "Unknown" + ] + } + } + } +} \ No newline at end of file diff --git a/openrpc/test/resources/custom_encoder_with_simplified_version.json b/openrpc/test/resources/custom_encoder_with_simplified_version.json new file mode 100644 index 0000000..c777295 --- /dev/null +++ b/openrpc/test/resources/custom_encoder_with_simplified_version.json @@ -0,0 +1,79 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "createHuman", + "params": [ + { + "name": "human", + "required": true, + "schema": { + "$ref": "#/components/schemas/Human" + } + }, + { + "name": "data", + "required": false, + "schema": { + "type": "string", + "format": "binary", + "examples": [ + "data: some_data" + ] + } + } + ], + "result": { + "name": "result", + "schema": { + "type": "boolean" + } + } + }, + { + "name": "createHumanSimplified", + "params": [ + { + "name": "human", + "required": true, + "schema": { + "$ref": "#/components/schemas/Human" + } + } + ], + "result": { + "name": "result", + "schema": { + "type": "boolean" + } + } + } + ], + "components": { + "schemas": { + "Human": { + "title": "Human", + "required": [ + "name", + "nickname" + ], + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "nickname": { + "type": "string" + } + }, + "examples": [ + "Unknown" + ] + } + } + } +} \ No newline at end of file diff --git a/openrpc/test/resources/empty.json b/openrpc/test/resources/empty.json new file mode 100644 index 0000000..66f1bfa --- /dev/null +++ b/openrpc/test/resources/empty.json @@ -0,0 +1,20 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "empty", + "params": [ + ], + "result": { + "name": "empty result", + "schema": { + + } + } + } + ] +} \ No newline at end of file diff --git a/openrpc/test/resources/empty.yaml b/openrpc/test/resources/empty.yaml new file mode 100644 index 0000000..d02c233 --- /dev/null +++ b/openrpc/test/resources/empty.yaml @@ -0,0 +1,10 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: empty + params: [] + result: + name: empty result + schema: {} diff --git 
a/openrpc/test/resources/external_ref.yaml b/openrpc/test/resources/external_ref.yaml new file mode 100644 index 0000000..ac40a2b --- /dev/null +++ b/openrpc/test/resources/external_ref.yaml @@ -0,0 +1,14 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: ext + params: + - name: problem + required: true + schema: + $ref: https://example.com/models/model.yaml#/Problem + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/generic_product.yaml b/openrpc/test/resources/generic_product.yaml new file mode 100644 index 0000000..28a38a2 --- /dev/null +++ b/openrpc/test/resources/generic_product.yaml @@ -0,0 +1,37 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: param1 + required: true + schema: + $ref: '#/components/schemas/G_String' + - name: param2 + required: true + schema: + $ref: '#/components/schemas/G_Int' + result: + name: empty result + schema: {} +components: + schemas: + G_Int: + title: G_Int + required: + - data + type: object + properties: + data: + type: integer + format: int32 + G_String: + title: G_String + required: + - data + type: object + properties: + data: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/multiple_endpoints.yaml b/openrpc/test/resources/multiple_endpoints.yaml new file mode 100644 index 0000000..f729d64 --- /dev/null +++ b/openrpc/test/resources/multiple_endpoints.yaml @@ -0,0 +1,27 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + result: + name: response + schema: + type: string +- name: hello2 + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + result: + name: response + schema: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/multiple_params.yaml b/openrpc/test/resources/multiple_params.yaml new file mode 100644 index 0000000..0563582 --- /dev/null +++ b/openrpc/test/resources/multiple_params.yaml @@ -0,0 +1,20 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + - name: param2 + required: true + schema: + type: string + result: + name: response + schema: + type: string diff --git a/openrpc/test/resources/nested_product.yaml b/openrpc/test/resources/nested_product.yaml new file mode 100644 index 0000000..41aaa7a --- /dev/null +++ b/openrpc/test/resources/nested_product.yaml @@ -0,0 +1,64 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: book + required: true + schema: + $ref: '#/components/schemas/Book' + result: + name: empty result + schema: {} +components: + schemas: + Author: + title: Author + required: + - name + - country + type: object + properties: + name: + type: string + country: + $ref: '#/components/schemas/Country' + Book: + title: Book + required: + - title + - genre + - year + - author + type: object + properties: + title: + type: string + genre: + $ref: '#/components/schemas/Genre' + year: + type: integer + format: int32 + author: + $ref: '#/components/schemas/Author' + Country: + title: Country + required: + - name + type: object + properties: + name: + type: string + Genre: + title: Genre + required: + - name + - description + type: object + properties: + name: + type: string + description: + type: 
string \ No newline at end of file diff --git a/openrpc/test/resources/one_of_fixed_errors.yaml b/openrpc/test/resources/one_of_fixed_errors.yaml new file mode 100644 index 0000000..cd38745 --- /dev/null +++ b/openrpc/test/resources/one_of_fixed_errors.yaml @@ -0,0 +1,17 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: oneOf + params: [] + errors: + - code: 201 + message: error1 + - code: 202 + message: error2 + result: + name: p1 + schema: + type: integer + format: int32 diff --git a/openrpc/test/resources/one_of_fixed_errors_with_data.yaml b/openrpc/test/resources/one_of_fixed_errors_with_data.yaml new file mode 100644 index 0000000..c61c1ce --- /dev/null +++ b/openrpc/test/resources/one_of_fixed_errors_with_data.yaml @@ -0,0 +1,32 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: oneOf + params: [] + errors: + - code: 201 + message: error1 + data: + $ref: '#/components/schemas/ErrorInfo' + - code: 202 + message: error2 + data: + $ref: '#/components/schemas/ErrorInfo' + result: + name: p1 + schema: + type: integer + format: int32 +components: + schemas: + ErrorInfo: + title: ErrorInfo + required: + - bugId + type: object + properties: + bugId: + type: integer + format: int32 \ No newline at end of file diff --git a/openrpc/test/resources/optional_param.yaml b/openrpc/test/resources/optional_param.yaml new file mode 100644 index 0000000..3ca0835 --- /dev/null +++ b/openrpc/test/resources/optional_param.yaml @@ -0,0 +1,16 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: false + schema: + type: integer + format: int32 + result: + name: response + schema: + type: string diff --git a/openrpc/test/resources/optional_product.yaml b/openrpc/test/resources/optional_product.yaml new file mode 100644 index 0000000..db685f0 --- /dev/null +++ b/openrpc/test/resources/optional_product.yaml @@ -0,0 +1,24 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: pet + required: false + schema: + $ref: '#/components/schemas/Pet' + result: + name: empty result + schema: {} +components: + schemas: + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/optional_recursive_result.yaml b/openrpc/test/resources/optional_recursive_result.yaml new file mode 100644 index 0000000..c22871e --- /dev/null +++ b/openrpc/test/resources/optional_recursive_result.yaml @@ -0,0 +1,30 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: [] + result: + name: p1 + schema: + $ref: '#/components/schemas/F2OrNull' +components: + schemas: + F2: + title: F2 + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/F2' + F2OrNull: + title: F2OrNull + oneOf: + - $ref: '#/components/schemas/F2' + - $ref: '#/components/schemas/Null' + 'Null': + title: 'Null' + type: object + description: 'null' \ No newline at end of file diff --git a/openrpc/test/resources/optional_result_product.yaml b/openrpc/test/resources/optional_result_product.yaml new file mode 100644 index 0000000..ca205c0 --- /dev/null +++ b/openrpc/test/resources/optional_result_product.yaml @@ -0,0 +1,30 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPet + params: [] + result: + name: pet + schema: + $ref: '#/components/schemas/PetOrNull' 
+components: + schemas: + 'Null': + title: 'Null' + type: object + description: 'null' + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string + PetOrNull: + title: PetOrNull + oneOf: + - $ref: '#/components/schemas/Null' + - $ref: '#/components/schemas/Pet' \ No newline at end of file diff --git a/openrpc/test/resources/product.yaml b/openrpc/test/resources/product.yaml new file mode 100644 index 0000000..6151b7a --- /dev/null +++ b/openrpc/test/resources/product.yaml @@ -0,0 +1,24 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: pet + required: true + schema: + $ref: '#/components/schemas/Pet' + result: + name: empty result + schema: {} +components: + schemas: + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/product_array.yaml b/openrpc/test/resources/product_array.yaml new file mode 100644 index 0000000..9f2c439 --- /dev/null +++ b/openrpc/test/resources/product_array.yaml @@ -0,0 +1,26 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: pet + required: true + schema: + type: array + items: + $ref: '#/components/schemas/Pet' + result: + name: empty result + schema: {} +components: + schemas: + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/product_duplicated_names.yaml b/openrpc/test/resources/product_duplicated_names.yaml new file mode 100644 index 0000000..89557ff --- /dev/null +++ b/openrpc/test/resources/product_duplicated_names.yaml @@ -0,0 +1,37 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: param1 + required: true + schema: + $ref: '#/components/schemas/Pet' + - name: param2 + required: true + schema: + $ref: '#/components/schemas/Pet1' + result: + name: empty result + schema: {} +components: + schemas: + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string + Pet1: + title: Pet1 + required: + - age + type: object + properties: + age: + type: integer + format: int32 \ No newline at end of file diff --git a/openrpc/test/resources/product_with_meta.yaml b/openrpc/test/resources/product_with_meta.yaml new file mode 100644 index 0000000..ffb04e3 --- /dev/null +++ b/openrpc/test/resources/product_with_meta.yaml @@ -0,0 +1,26 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: pet + required: true + schema: + $ref: '#/components/schemas/CustomPetName' + result: + name: empty result + schema: {} +components: + schemas: + CustomPetName: + title: CustomPetName + required: + - name + type: object + properties: + name: + type: string + description: Schema description + deprecated: true \ No newline at end of file diff --git a/openrpc/test/resources/recursive_product.yaml b/openrpc/test/resources/recursive_product.yaml new file mode 100644 index 0000000..534891e --- /dev/null +++ b/openrpc/test/resources/recursive_product.yaml @@ -0,0 +1,24 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: p1 + required: true + schema: + $ref: '#/components/schemas/F1' + result: + name: empty result + schema: {} +components: + schemas: + F1: + title: F1 + type: object + properties: + data: + type: 
array + items: + $ref: '#/components/schemas/F1' \ No newline at end of file diff --git a/openrpc/test/resources/result_product.yaml b/openrpc/test/resources/result_product.yaml new file mode 100644 index 0000000..cf7bf2e --- /dev/null +++ b/openrpc/test/resources/result_product.yaml @@ -0,0 +1,21 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPet + params: [] + result: + name: pet + schema: + $ref: '#/components/schemas/Pet' +components: + schemas: + Pet: + title: Pet + required: + - name + type: object + properties: + name: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/single_fixed_error.yaml b/openrpc/test/resources/single_fixed_error.yaml new file mode 100644 index 0000000..4e3c40e --- /dev/null +++ b/openrpc/test/resources/single_fixed_error.yaml @@ -0,0 +1,19 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + errors: + - code: 100 + message: My fixed error + result: + name: response + schema: + type: string diff --git a/openrpc/test/resources/single_fixed_error_with_data.yaml b/openrpc/test/resources/single_fixed_error_with_data.yaml new file mode 100644 index 0000000..88a138c --- /dev/null +++ b/openrpc/test/resources/single_fixed_error_with_data.yaml @@ -0,0 +1,21 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + params: + - name: param1 + required: true + schema: + type: integer + format: int32 + errors: + - code: 100 + message: My fixed error + data: + type: string + result: + name: response + schema: + type: string diff --git a/openrpc/test/resources/sorted_basic_empty.json b/openrpc/test/resources/sorted_basic_empty.json new file mode 100644 index 0000000..a44274f --- /dev/null +++ b/openrpc/test/resources/sorted_basic_empty.json @@ -0,0 +1,39 @@ +{ + "openrpc": "1.2.1", + "info": { + "version": "1.0.0", + "title": "Demo Pet Store" + }, + "methods": [ + { + "name": "empty", + "params": [ + ], + "result": { + "name": "empty result", + "schema": { + + } + } + }, + { + "name": "hello", + "params": [ + { + "name": "param1", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } + } + ], + "result": { + "name": "response", + "schema": { + "type": "string" + } + } + } + ] +} diff --git a/openrpc/test/resources/sum.yaml b/openrpc/test/resources/sum.yaml new file mode 100644 index 0000000..4b10f24 --- /dev/null +++ b/openrpc/test/resources/sum.yaml @@ -0,0 +1,80 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: createPet + params: + - name: animal + required: true + schema: + $ref: '#/components/schemas/Animal' + result: + name: empty result + schema: {} +components: + schemas: + Amphibian: + title: Amphibian + required: + - name + type: object + properties: + name: + type: string + Animal: + title: Animal + oneOf: + - $ref: '#/components/schemas/Amphibian' + - $ref: '#/components/schemas/Bird' + - $ref: '#/components/schemas/Fish' + - $ref: '#/components/schemas/Invertebrate' + - $ref: '#/components/schemas/Mammal' + - $ref: '#/components/schemas/Reptile' + Bird: + title: Bird + required: + - name + - canFly + type: object + properties: + name: + type: string + canFly: + type: boolean + Fish: + title: Fish + required: + - name + type: object + properties: + name: + type: string + Invertebrate: + title: Invertebrate + required: + - name + - numberOfLegs + type: object + properties: + 
name: + type: string + numberOfLegs: + type: integer + format: int32 + Mammal: + title: Mammal + required: + - name + type: object + properties: + name: + type: string + Reptile: + title: Reptile + required: + - name + type: object + properties: + name: + type: string \ No newline at end of file diff --git a/openrpc/test/resources/validatedAll.yaml b/openrpc/test/resources/validatedAll.yaml new file mode 100644 index 0000000..a7c71d6 --- /dev/null +++ b/openrpc/test/resources/validatedAll.yaml @@ -0,0 +1,17 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: parameter + required: true + schema: + type: string + pattern: \w+ + minLength: 1 + maxLength: 10 + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedArrays.yaml b/openrpc/test/resources/validatedArrays.yaml new file mode 100644 index 0000000..b81cc34 --- /dev/null +++ b/openrpc/test/resources/validatedArrays.yaml @@ -0,0 +1,24 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: array1 + required: true + schema: + type: array + items: + type: string + minItems: 1 + - name: array2 + required: true + schema: + type: array + items: + type: string + maxItems: 10 + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedCustom.yaml b/openrpc/test/resources/validatedCustom.yaml new file mode 100644 index 0000000..74cea3e --- /dev/null +++ b/openrpc/test/resources/validatedCustom.yaml @@ -0,0 +1,15 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: parameter + required: true + schema: + type: string + description: Value needs to be correct + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedEnumerations.yaml b/openrpc/test/resources/validatedEnumerations.yaml new file mode 100644 index 0000000..04c2aee --- /dev/null +++ b/openrpc/test/resources/validatedEnumerations.yaml @@ -0,0 +1,22 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: enum + required: true + schema: + $ref: '#/components/schemas/Enum' + result: + name: empty result + schema: {} +components: + schemas: + Enum: + title: Enum + type: string + enum: + - Val1 + - Val2 \ No newline at end of file diff --git a/openrpc/test/resources/validatedInts.yaml b/openrpc/test/resources/validatedInts.yaml new file mode 100644 index 0000000..614ffaf --- /dev/null +++ b/openrpc/test/resources/validatedInts.yaml @@ -0,0 +1,22 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: number1 + required: true + schema: + type: integer + format: int32 + minimum: 1.0 + - name: number2 + required: true + schema: + type: integer + format: int32 + maximum: 10.0 + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedIntsWithExclusives.yaml b/openrpc/test/resources/validatedIntsWithExclusives.yaml new file mode 100644 index 0000000..7867fcf --- /dev/null +++ b/openrpc/test/resources/validatedIntsWithExclusives.yaml @@ -0,0 +1,22 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: number1 + required: true + schema: + type: integer + format: int32 + exclusiveMinimum: 1.0 + - name: number2 + required: true + schema: + type: integer + format: int32 + exclusiveMaximum: 
10.0 + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedMapped.yaml b/openrpc/test/resources/validatedMapped.yaml new file mode 100644 index 0000000..e842509 --- /dev/null +++ b/openrpc/test/resources/validatedMapped.yaml @@ -0,0 +1,21 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: methodWithNumberFromString + params: + - name: numberFromString + required: true + schema: + $ref: '#/components/schemas/NumberFromString' + result: + name: empty result + schema: {} +components: + schemas: + NumberFromString: + title: NumberFromString + type: string + pattern: ^[0-9]$ + maxLength: 10 diff --git a/openrpc/test/resources/validatedStringEnumeration.yaml b/openrpc/test/resources/validatedStringEnumeration.yaml new file mode 100644 index 0000000..de71cc3 --- /dev/null +++ b/openrpc/test/resources/validatedStringEnumeration.yaml @@ -0,0 +1,18 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: enum + required: true + schema: + type: string + enum: + - opt1 + - opt2 + - opt3 + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/validatedStrings.yaml b/openrpc/test/resources/validatedStrings.yaml new file mode 100644 index 0000000..8d2070c --- /dev/null +++ b/openrpc/test/resources/validatedStrings.yaml @@ -0,0 +1,25 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: getPetByNumber + params: + - name: string1 + required: true + schema: + type: string + minLength: 1 + - name: string2 + required: true + schema: + type: string + maxLength: 10 + - name: string3 + required: true + schema: + type: string + pattern: \w+ + result: + name: empty result + schema: {} diff --git a/openrpc/test/resources/with_info.yaml b/openrpc/test/resources/with_info.yaml new file mode 100644 index 0000000..e2988f9 --- /dev/null +++ b/openrpc/test/resources/with_info.yaml @@ -0,0 +1,30 @@ +openrpc: 1.2.1 +info: + version: 1.0.0 + title: Demo Pet Store +methods: +- name: hello + tags: + - name: The name of the tag + summary: A short summary of the tag + description: A verbose explanation for the tag + externalDocs: + url: http://example.com + description: A verbose explanation of the target documentation + summary: A short summary of what the method does + description: A verbose explanation of the method behavior + params: + - name: param1 + summary: A short summary of the content that is being described + description: A verbose explanation of the content descriptor behavior + required: true + schema: + type: integer + format: int32 + deprecated: true + result: + name: response + summary: A short summary of the content that is being described + description: A verbose explanation of the content descriptor behavior + schema: + type: string diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/Animal.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/Animal.scala new file mode 100644 index 0000000..346fb67 --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/Animal.scala @@ -0,0 +1,17 @@ +package io.iohk.armadillo.openrpc + +sealed trait Animal + +object Animal { + final case class Amphibian(name: String) extends Animal + + final case class Bird(name: String, canFly: Boolean) extends Animal + + final case class Fish(name: String) extends Animal + + final case class Invertebrate(name: String, numberOfLegs: Int) extends Animal + + final case class Mammal(name: String) extends Animal + + final case class Reptile(name: 
String) extends Animal +} diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/Basic.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/Basic.scala new file mode 100644 index 0000000..93d9bc0 --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/Basic.scala @@ -0,0 +1,245 @@ +package io.iohk.armadillo.openrpc + +import io.circe.generic.auto._ +import io.circe.{Encoder, Json} +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe._ +import sttp.tapir.Schema.derivedEnumeration +import sttp.tapir.generic.auto._ +import sttp.tapir.{Schema, SchemaType, ValidationResult, Validator} + +object Basic { + val basic: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1")) + .out[String]("response") + + val basicWithSingleExample: JsonRpcEndpoint[(Int, Option[String]), Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1").example(42).and(param[Option[String]]("param2").example(Some("test")))) + .out(result[String]("response").example("ok")) + + val basicWithMultipleExamples: JsonRpcEndpoint[(Int, Option[String]), Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1").examples(Set(42, 43)).and(param[Option[String]]("param2").examples(Set(Some("test"), None)))) + .out(result[String]("response").examples(Set("ok", "ko"))) + + val multiple_params: JsonRpcEndpoint[(Int, String), Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1") and param[String]("param2")) + .out[String]("response") + + val withInfo: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello") + .description("A verbose explanation of the method behavior") + .summary("A short summary of what the method does") + .tag( + JsonRpcEndpointTag("The name of the tag") + .summary("A short summary of the tag") + .description("A verbose explanation for the tag") + .externalDocs( + JsonRpcEndpointExternalDocs("http://example.com") + .description("A verbose explanation of the target documentation") + ) + ) + .in( + param[Int]("param1") + .summary("A short summary of the content that is being described") + .deprecated() + .description("A verbose explanation of the content descriptor behavior") + ) + .out( + result[String]("response") + .summary("A short summary of the content that is being described") + .description("A verbose explanation of the content descriptor behavior") + ) + + val optionalParam: JsonRpcEndpoint[Option[Int], Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Option[Int]]("param1")) + .out[String]("response") + + val empty: JsonRpcEndpoint[Unit, Unit, Unit] = jsonRpcEndpoint(m"empty") + + case class Pet(name: String) + + val product: JsonRpcEndpoint[Pet, Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[Pet]("pet")) + + val optionalProduct: JsonRpcEndpoint[Option[Pet], Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[Option[Pet]]("pet")) + + val product_with_meta: JsonRpcEndpoint[Pet, Unit, Unit] = { + implicit val schema: Schema[Pet] = sttp.tapir.Schema + .derived[Pet] + .description("Schema description") + .deprecated(true) + .name(Schema.SName("CustomPetName")) + jsonRpcEndpoint(m"createPet") + .in(param[Pet]("pet")) + } + + case class Problem() + + object Problem { + implicit val schema: Schema[Problem] = + Schema[Problem]( + SchemaType.SRef( + Schema.SName("https://example.com/models/model.yaml#/Problem") + ) + ) + } + + val external_ref: JsonRpcEndpoint[Problem, Unit, Unit] = jsonRpcEndpoint(m"ext") + .in(param[Problem]("problem")) + + val productArray: JsonRpcEndpoint[List[Pet], Unit, Unit] = 
jsonRpcEndpoint(m"createPet") + .in(param[List[Pet]]("pet")) + + case class Country(name: String) + case class Author(name: String, country: Country) + case class Genre(name: String, description: String) + case class Book(title: String, genre: Genre, year: Int, author: Author) + + val nestedProducts: JsonRpcEndpoint[Book, Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[Book]("book")) + + val productDuplicatedNames: JsonRpcEndpoint[(Pet, openrpc.Pet), Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[Pet]("param1") and param[io.iohk.armadillo.openrpc.Pet]("param2")) + + case class F1(data: List[F1]) + + val recursiveProduct: JsonRpcEndpoint[F1, Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[F1]("p1")) + + case class G[T](data: T) + + val genericProduct: JsonRpcEndpoint[(G[String], G[Int]), Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[G[String]]("param1") and param[G[Int]]("param2")) + + val optionalResultProduct: JsonRpcEndpoint[Unit, Unit, Option[Pet]] = jsonRpcEndpoint(m"getPet") + .out(result[Option[Pet]]("pet")) + + val resultProduct: JsonRpcEndpoint[Unit, Unit, Pet] = jsonRpcEndpoint(m"getPet") + .out(result[Pet]("pet")) + + case class F2(data: List[F2]) + + val optionalRecursiveResult: JsonRpcEndpoint[Unit, Unit, Option[F2]] = jsonRpcEndpoint(m"createPet") + .out(result[Option[F2]]("p1")) + + case class F3(data: Option[F3]) + + val arrayOfRecursiveOptionalResult: JsonRpcEndpoint[Unit, Unit, List[F3]] = jsonRpcEndpoint(m"createPet") + .out(result[List[F3]]("p1")) + + val singleFixedError: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1")) + .errorOut(fixedError[Unit](100, "My fixed error")) + .out[String]("response") + + val singleFixedErrorWithData: JsonRpcEndpoint[Int, String, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1")) + .errorOut(fixedErrorWithData[String](100, "My fixed error")) + .out[String]("response") + + val oneOfFixedErrors: JsonRpcEndpoint[Unit, Either[Unit, Unit], Int] = jsonRpcEndpoint(m"oneOf") + .errorOut( + oneOf[Either[Unit, Unit]]( + oneOfVariantValueMatcher(fixedError(201, "error1")) { case Left(_) => true }, + oneOfVariantValueMatcher(fixedError(202, "error2")) { case Right(_) => true } + ) + ) + .out(result[Int]("p1")) + + val oneOfFixedErrorsWithData: JsonRpcEndpoint[Unit, ErrorInfo, Int] = jsonRpcEndpoint(m"oneOf") + .errorOut(oneOf(oneOfVariant(fixedErrorWithData[ErrorInfo](201, "error1")), oneOfVariant(fixedErrorWithData[ErrorInfo](202, "error2")))) + .out(result[Int]("p1")) + + case class ErrorInfo(bugId: Int) + + val sum: JsonRpcEndpoint[Animal, Unit, Unit] = jsonRpcEndpoint(m"createPet") + .in(param[Animal]("animal")) + + val validatedInts = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[Int]("number1").validate(Validator.min(1)) and + param[Int]("number2").validate(Validator.max(10)) + ) + + val validatedIntsWithExclusives = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[Int]("number1").validate(Validator.min(1, exclusive = true)) and + param[Int]("number2").validate(Validator.max(10, exclusive = true)) + ) + + val validatedStrings = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[String]("string1").validate(Validator.minLength(1)) and + param[String]("string2").validate(Validator.maxLength(10)) and + param[String]("string3").validate(Validator.pattern("\\w+")) + ) + + val validatedArrays = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[Seq[String]]("array1").validate(Validator.minSize(1)) and + 
param[Seq[String]]("array2").validate(Validator.maxSize(10)) + ) + + val validatedStringEnumeration = jsonRpcEndpoint(m"getPetByNumber") + .in(param[String]("enum").validate(Validator.enumeration(List("opt1", "opt2", "opt3")))) + + sealed trait Enum + final case object Val1 extends Enum + final case object Val2 extends Enum + val validatedEnumeration = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[Enum]("enum")(jsonRpcCodec(implicitly, implicitly, derivedEnumeration[Enum]())).validate( + Validator.enumeration(List(Val1, Val2)) + ) + ) + + val validatedAll = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[String]("parameter").validate( + Validator.all( + Validator.minLength(1), + Validator.maxLength(10), + Validator.pattern("\\w+") + ) + ) + ) + + val validatedCustom = jsonRpcEndpoint(m"getPetByNumber") + .in( + param[String]("parameter").validate( + Validator.custom(_ => ValidationResult.Valid, Some("Value needs to be correct")) + ) + ) + + val validatedMapped: JsonRpcEndpoint[Int, Unit, Unit] = { + implicit val numberFromString: Schema[Int] = sttp.tapir.Schema.schemaForString + .validate(Validator.maxLength(10)) + .validate(Validator.pattern("^[0-9]$")) + .map(_.toIntOption)(_.toString) + .name(Schema.SName("NumberFromString")) + jsonRpcEndpoint(m"methodWithNumberFromString") + .in(param[Int]("numberFromString")) + } + + final case class Human(name: String, nickname: String) + object Human { + implicit val humanEncoder: Encoder[Human] = (human: Human) => Json.fromString(human.nickname) + } + + final case class Data(bytes: Array[Byte]) extends AnyVal + object Data { + implicit val dataEncoder: Encoder[Data] = (data: Data) => Json.fromString("data: " + new String(data.bytes)) + } + + val customEncoder: JsonRpcEndpoint[(Human, Option[Data]), Unit, Boolean] = jsonRpcEndpoint(m"createHuman") + .in( + param[Human]("human") + .example(Human("John", "Unknown")) + .and(param[Option[Data]]("data").examples(Set(Some(Data("some_data".getBytes)), None))) + ) + .out[Boolean]("result") + + val customSimplifiedEncoder: JsonRpcEndpoint[Human, Unit, Boolean] = jsonRpcEndpoint(m"createHumanSimplified") + .in(param[Human]("human")) + .out[Boolean]("result") +} diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/Pet.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/Pet.scala new file mode 100644 index 0000000..145bc84 --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/Pet.scala @@ -0,0 +1,3 @@ +package io.iohk.armadillo.openrpc + +case class Pet(age: Int) diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/TestUtils.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/TestUtils.scala new file mode 100644 index 0000000..aa0398e --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/TestUtils.scala @@ -0,0 +1,20 @@ +package io.iohk.armadillo.openrpc + +import cats.effect.{IO, Resource} + +import scala.io.Source + +object TestUtils { + private[openrpc] def load(fileName: String): Resource[IO, String] = { + Resource + .make( + IO.blocking( + Source + .fromInputStream(VerifyYamlTest.getClass.getResourceAsStream(s"/$fileName")) + ) + )(source => IO.delay(source.close())) + .map(_.getLines().mkString("\n")) + .map(noIndentation) + } + private[openrpc] def noIndentation(s: String): String = s.trim +} diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyJsonTest.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyJsonTest.scala new file mode 100644 index 0000000..c853e76 --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyJsonTest.scala @@ -0,0 +1,52
@@ +package io.iohk.armadillo.openrpc + +import cats.effect.IO +import io.circe.Printer +import io.circe.syntax.EncoderOps +import io.iohk.armadillo.openrpc.Basic.{ + basic, + basicWithMultipleExamples, + basicWithSingleExample, + customEncoder, + customSimplifiedEncoder, + empty +} +import io.iohk.armadillo.openrpc.TestUtils.{load, noIndentation} +import io.iohk.armadillo.openrpc.circe._ +import io.iohk.armadillo.openrpc.model.{OpenRpcDocument, OpenRpcInfo} +import weaver.SimpleIOSuite + +object VerifyJsonTest extends SimpleIOSuite { + + private val PetStoreInfo: OpenRpcInfo = OpenRpcInfo("1.0.0", "Demo Pet Store") + + compare("basic.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basic))) + compare("basic_with_single_example.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basicWithSingleExample))) + compare("basic_with_multiple_examples.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basicWithMultipleExamples))) + compare("empty.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(empty))) + compare("sorted_basic_empty.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basic, empty))) + compare("custom_encoder.json", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(customEncoder))) + compare( + "custom_encoder_with_simplified_version.json", + OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(customSimplifiedEncoder, customEncoder)) + ) + + test("OpenRpcDocument's methods are ordered") { + IO.pure( + expect.same( + OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(empty, basic)), + OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basic, empty)) + ) + ) + } + + private def compare(file: String, document: OpenRpcDocument): Unit = { + test(file) { + load(file).use { expected => + val actual = document.asJson + val actualJsonNoIndent = noIndentation(Printer.spaces2.copy(dropNullValues = true, colonLeft = "").print(actual)) + IO.delay(expect.same(expected, actualJsonNoIndent)) + } + } + } +} diff --git a/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyYamlTest.scala b/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyYamlTest.scala new file mode 100644 index 0000000..09f9445 --- /dev/null +++ b/openrpc/test/src/io/iohk/armadillo/openrpc/VerifyYamlTest.scala @@ -0,0 +1,64 @@ +package io.iohk.armadillo.openrpc + +import cats.effect.IO +import io.iohk.armadillo._ +import io.iohk.armadillo.openrpc.Basic._ +import io.iohk.armadillo.openrpc.TestUtils.{load, noIndentation} +import io.iohk.armadillo.openrpc.circe.yaml._ +import io.iohk.armadillo.openrpc.model.{OpenRpcDocument, OpenRpcInfo} +import weaver.SimpleIOSuite + +//verify that: +//method name is unique +//param name is unique +//error codes are unique +object VerifyYamlTest extends SimpleIOSuite { + + private val PetStoreInfo: OpenRpcInfo = OpenRpcInfo("1.0.0", "Demo Pet Store") + + compare("basic.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basic))) + compare("multiple_params.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(multiple_params))) + compare("with_info.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(withInfo))) + compare("optional_param.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(optionalParam))) + compare("multiple_endpoints.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(basic, basic.copy(methodName = m"hello2")))) + compare("empty.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(empty))) + compare("product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, 
List(product))) + compare("optional_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(optionalProduct))) + compare("product_with_meta.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(product_with_meta))) + compare("external_ref.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(external_ref))) + compare("product_array.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(productArray))) + compare("nested_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(nestedProducts))) + compare("product_duplicated_names.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(productDuplicatedNames))) + compare("recursive_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(recursiveProduct))) + compare("generic_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(genericProduct))) + compare("optional_result_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(optionalResultProduct))) + compare("result_product.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(resultProduct))) + compare("optional_recursive_result.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(optionalRecursiveResult))) + compare("array_of_recursive_optional_result.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(arrayOfRecursiveOptionalResult))) + compare("single_fixed_error.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(singleFixedError))) + compare("single_fixed_error_with_data.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(singleFixedErrorWithData))) + compare("one_of_fixed_errors.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(oneOfFixedErrors))) + compare("one_of_fixed_errors_with_data.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(oneOfFixedErrorsWithData))) + compare("sum.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(sum))) + compare("validatedInts.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedInts))) + compare("validatedIntsWithExclusives.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedIntsWithExclusives))) + compare("validatedStrings.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedStrings))) + compare("validatedArrays.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedArrays))) + compare("validatedStringEnumeration.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedStringEnumeration))) + compare("validatedEnumerations.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedEnumeration))) + compare("validatedAll.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedAll))) + compare("validatedCustom.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedCustom))) + compare("validatedMapped.yaml", OpenRpcDocsInterpreter().toOpenRpc(PetStoreInfo, List(validatedMapped))) + + private def compare(file: String, document: OpenRpcDocument, debug: Boolean = false): Unit = { + test(file) { + load(file).use { expected => + val actual = noIndentation(document.toYaml) + if (debug) { + println(actual) + } + IO.delay(expect.same(expected, actual)) + } + } + } +} diff --git a/server/fs2/readme.md b/server/fs2/readme.md new file mode 100644 index 0000000..1533ada --- /dev/null +++ b/server/fs2/readme.md @@ -0,0 +1,14 @@ +Example request to test the server: + +```curl +curl --location --request POST 'localhost:8545/' \ +--header 'Content-Type: application/json' \ +--data-raw '{ 
+ "jsonrpc":"2.0", + "method":"eth_getBlockByNumber", + "params":{ + "blockNumber": 123123,"includeTransactions":"true" + }, + "id":1 +}' +``` \ No newline at end of file diff --git a/server/fs2/src/io/iohk/armadillo/server/fs2/Fs2Interpreter.scala b/server/fs2/src/io/iohk/armadillo/server/fs2/Fs2Interpreter.scala new file mode 100644 index 0000000..808dadc --- /dev/null +++ b/server/fs2/src/io/iohk/armadillo/server/fs2/Fs2Interpreter.scala @@ -0,0 +1,30 @@ +package io.iohk.armadillo.server.fs2 + +import cats.effect.kernel.Async +import io.iohk.armadillo.JsonRpcServerEndpoint +import io.iohk.armadillo.server.ServerInterpreter.{InterpretationError, ServerResponse} +import io.iohk.armadillo.server.{CustomInterceptors, Interceptor, JsonSupport, ServerInterpreter} +import sttp.tapir.integ.cats.CatsMonadError + +class Fs2Interpreter[F[_]: Async, Raw]( + jsonSupport: JsonSupport[Raw], + interceptors: List[Interceptor[F, Raw]] = CustomInterceptors[F, Raw]().interceptors +) { + + def toFs2Pipe(jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]]): Either[InterpretationError, fs2.Pipe[F, String, Raw]] = { + implicit val monadError: CatsMonadError[F] = new CatsMonadError[F] + ServerInterpreter(jsonRpcEndpoints, jsonSupport, interceptors).map(si => stream => stream.through(toFs2Unsafe(si))) + } + + private def toFs2Unsafe(serverInterpreter: ServerInterpreter[F, Raw]): fs2.Pipe[F, String, Raw] = { stream => + stream + .flatMap { request => + fs2.Stream + .eval(serverInterpreter.dispatchRequest(request)) + .collect { + case Some(ServerResponse.Success(response)) => response + case Some(ServerResponse.Failure(response)) => response + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/BatchRequestHandler.scala b/server/src/io/iohk/armadillo/server/BatchRequestHandler.scala new file mode 100644 index 0000000..d681b46 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/BatchRequestHandler.scala @@ -0,0 +1,45 @@ +package io.iohk.armadillo.server + +import cats.syntax.all._ +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import io.iohk.armadillo.server.Utils.RichMonadErrorOps +import sttp.monad.MonadError +import sttp.monad.syntax._ + +trait BatchRequestHandler[F[_], Raw] { + def apply(next: RequestHandler[F, Raw], requests: List[Json.JsonObject[Raw]], jsonSupport: JsonSupport[Raw])(implicit + monad: MonadError[F] + ): F[ResponseHandlingStatus[Raw]] +} + +object BatchRequestHandler { + def default[F[_], Raw]: BatchRequestHandler[F, Raw] = new BatchRequestHandler[F, Raw] { + override def apply(next: RequestHandler[F, Raw], requests: List[Json.JsonObject[Raw]], jsonSupport: JsonSupport[Raw])(implicit + monad: MonadError[F] + ): F[ResponseHandlingStatus[Raw]] = { + requests + .foldRight(monad.unit(List.empty[ResponseHandlingStatus[Raw]])) { case (req, accF) => + val fb = next.onDecodeSuccess(req) + fb.map2(accF)(_ :: _) + } + .map { responseStatuses => + val combinedResponseStatus = responseStatuses.traverse { // TODO add test when one batch request element is still unhandled + case ResponseHandlingStatus.Handled(resp) => Right(resp) + case ResponseHandlingStatus.Unhandled => Left(()) + } + + combinedResponseStatus match { + case Left(_) => ResponseHandlingStatus.Unhandled + case Right(responses) => + val responsesWithoutNotifications = responses.collect { case Some(response) => response.body } + if (responsesWithoutNotifications.isEmpty) { + ResponseHandlingStatus.Handled(none) + } else { + 
ResponseHandlingStatus.Handled(ServerResponse.Success(jsonSupport.asArray(responsesWithoutNotifications.toVector)).some) + } + } + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/BatchRequestInterceptor.scala b/server/src/io/iohk/armadillo/server/BatchRequestInterceptor.scala new file mode 100644 index 0000000..be4fe9b --- /dev/null +++ b/server/src/io/iohk/armadillo/server/BatchRequestInterceptor.scala @@ -0,0 +1,45 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.server.Utils._ +import sttp.monad.MonadError +import sttp.tapir.DecodeResult + +class BatchRequestInterceptor[F[_], Raw](handler: BatchRequestHandler[F, Raw]) extends RequestInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + requestHandler: MethodInterceptor[F, Raw] => RequestHandler[F, Raw] + ): RequestHandler[F, Raw] = { + val next = requestHandler(MethodInterceptor.noop[F, Raw]()) + new RequestHandler[F, Raw] { + override def onDecodeSuccess(request: JsonSupport.Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + request match { + case Json.JsonArray(values) => + val results = values.map(jsonSupport.materialize).map { + case obj: Json.JsonObject[Raw] => DecodeResult.Value(obj) + case arr: Json.JsonArray[Raw] => + DecodeResult.Mismatch("expected object got array", jsonSupport.stringify(jsonSupport.demateralize(arr))) + case Json.Other(raw) => DecodeResult.Mismatch("expected object got", jsonSupport.stringify(raw)) + } + val combinedResults = results.foldRight(DecodeResult.Value(List.empty): DecodeResult[List[Json.JsonObject[Raw]]]) { + (item, acc) => item.map2(acc)(_ :: _) + } + combinedResults match { + case failure: DecodeResult.Failure => + next.onDecodeFailure(RequestHandler.DecodeFailureContext(failure, jsonSupport.stringify(jsonSupport.demateralize(request)))) + case DecodeResult.Value(requests) => handler(next, requests, jsonSupport) + } + case _ => next.onDecodeSuccess(request) + } + } + + override def onDecodeFailure( + ctx: RequestHandler.DecodeFailureContext + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeFailure(ctx) + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/CustomInterceptors.scala b/server/src/io/iohk/armadillo/server/CustomInterceptors.scala new file mode 100644 index 0000000..89f77dd --- /dev/null +++ b/server/src/io/iohk/armadillo/server/CustomInterceptors.scala @@ -0,0 +1,25 @@ +package io.iohk.armadillo.server + +case class CustomInterceptors[F[_], Raw]( + batchRequestHandler: BatchRequestHandler[F, Raw] = BatchRequestHandler.default[F, Raw], + decodeFailureHandler: DecodeFailureHandler[Raw] = DecodeFailureHandler.default[Raw], + methodNotFoundHandler: MethodNotFoundHandler[Raw] = MethodNotFoundHandler.default[Raw], + exceptionHandler: ExceptionHandler[Raw] = ExceptionHandler.default[Raw], + invalidRequestHandler: InvalidRequestHandler[Raw] = InvalidRequestHandler.default[Raw], + overriddenEndpoints: List[EndpointOverride[F]] = List.empty, + additionalInterceptors: List[Interceptor[F, Raw]] = Nil, + serverLog: Option[ServerLog[F, Raw]] = None +) { + def interceptors: List[Interceptor[F, Raw]] = + List( + new ExceptionInterceptor[F, Raw](exceptionHandler), + new BatchRequestInterceptor[F, Raw](batchRequestHandler), + new DecodeFailureInterceptor[F, Raw](decodeFailureHandler), + new MethodNotFoundInterceptor[F, 
Raw](methodNotFoundHandler), + new InvalidRequestMethodInterceptor[F, Raw](invalidRequestHandler), + new InvalidRequestStructureInterceptor[F, Raw], + new OverrideInterceptor[F, Raw](overriddenEndpoints) + ) ++ + serverLog.map(new LoggingEndpointInterceptor[F, Raw](_)).toList ++ + additionalInterceptors +} diff --git a/server/src/io/iohk/armadillo/server/DecodeFailureInterceptor.scala b/server/src/io/iohk/armadillo/server/DecodeFailureInterceptor.scala new file mode 100644 index 0000000..5c81453 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/DecodeFailureInterceptor.scala @@ -0,0 +1,49 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.JsonRpcResponse +import io.iohk.armadillo.server.RequestHandler.DecodeFailureContext +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import sttp.monad.MonadError +import sttp.monad.syntax._ + +class DecodeFailureInterceptor[F[_], Raw](handler: DecodeFailureHandler[Raw]) extends RequestInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + requestHandler: MethodInterceptor[F, Raw] => RequestHandler[F, Raw] + ): RequestHandler[F, Raw] = { + val next = requestHandler(MethodInterceptor.noop[F, Raw]()) + new RequestHandler[F, Raw] { + override def onDecodeSuccess(request: JsonSupport.Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeSuccess(request) + } + + override def onDecodeFailure(ctx: DecodeFailureContext)(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeFailure(ctx).map { + case ResponseHandlingStatus.Unhandled => + handler(ctx, jsonSupport) + case handled => handled + } + } + } + } +} + +object DecodeFailureInterceptor { + def default[F[_], Raw]: DecodeFailureInterceptor[F, Raw] = new DecodeFailureInterceptor[F, Raw](DecodeFailureHandler.default[Raw]) +} + +trait DecodeFailureHandler[Raw] { + def apply(context: DecodeFailureContext, jsonSupport: JsonSupport[Raw]): ResponseHandlingStatus[Raw] +} +object DecodeFailureHandler { + def default[Raw]: DecodeFailureHandler[Raw] = (_: DecodeFailureContext, jsonSupport: JsonSupport[Raw]) => { + ResponseHandlingStatus.Handled( + Some( + ServerResponse.Failure( + jsonSupport.encodeResponse(JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(ServerInterpreter.ParseError))) + ) + ) + ) + } +} diff --git a/server/src/io/iohk/armadillo/server/EndpointHandler.scala b/server/src/io/iohk/armadillo/server/EndpointHandler.scala new file mode 100644 index 0000000..f91c082 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/EndpointHandler.scala @@ -0,0 +1,27 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.EndpointHandler.{DecodeFailureContext, DecodeSuccessContext} +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.{JsonRpcRequest, JsonRpcServerEndpoint} +import sttp.monad.MonadError +import sttp.tapir.DecodeResult + +trait EndpointHandler[F[_], Raw] { + def onDecodeSuccess[I, E, O](ctx: DecodeSuccessContext[F, I, E, O, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] + def onDecodeFailure(ctx: DecodeFailureContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] +} + +object EndpointHandler { + case class DecodeSuccessContext[F[_], I, E, O, Raw]( + endpoint: JsonRpcServerEndpoint.Full[I, E, O, F], + request: JsonRpcRequest[Json[Raw]], + input: I + ) + + case 
class DecodeFailureContext[F[_], Raw]( + endpoint: JsonRpcServerEndpoint[F], + request: JsonRpcRequest[Json[Raw]], + f: DecodeResult.Failure + ) +} diff --git a/server/src/io/iohk/armadillo/server/ExceptionInterceptor.scala b/server/src/io/iohk/armadillo/server/ExceptionInterceptor.scala new file mode 100644 index 0000000..65ac91d --- /dev/null +++ b/server/src/io/iohk/armadillo/server/ExceptionInterceptor.scala @@ -0,0 +1,63 @@ +package io.iohk.armadillo.server + +import cats.syntax.all._ +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import io.iohk.armadillo.{AnyEndpoint, AnyRequest, JsonRpcResponse} +import sttp.monad.MonadError + +import scala.util.control.NonFatal + +class ExceptionInterceptor[F[_], Raw](handler: ExceptionHandler[Raw]) extends EndpointInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + endpointHandler: EndpointHandler[F, Raw] + ): EndpointHandler[F, Raw] = { + new EndpointHandler[F, Raw] { + override def onDecodeSuccess[I, E, O](ctx: EndpointHandler.DecodeSuccessContext[F, I, E, O, Raw])(implicit + monad: MonadError[F] + ): F[ResponseHandlingStatus[Raw]] = { + monad.handleError(endpointHandler.onDecodeSuccess(ctx)) { case NonFatal(e) => + onException(e, ctx.endpoint.endpoint, ctx.request) + } + } + + override def onDecodeFailure( + ctx: EndpointHandler.DecodeFailureContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + monad.handleError(endpointHandler.onDecodeFailure(ctx)) { case NonFatal(e) => + onException(e, ctx.endpoint.endpoint, ctx.request) + } + } + + private def onException(e: Throwable, endpoint: AnyEndpoint, request: AnyRequest)(implicit + monad: MonadError[F] + ): F[ResponseHandlingStatus[Raw]] = { + monad.suspend(monad.unit(handler(ExceptionContext(e, endpoint, request), jsonSupport))) + } + } + } +} + +case class ExceptionContext(e: Throwable, endpoint: AnyEndpoint, request: AnyRequest) + +trait ExceptionHandler[Raw] { + // Returning ResponseHandlingStatus.Unhandled signals that the exception was not handled here + def apply(ctx: ExceptionContext, jsonSupport: JsonSupport[Raw]): ResponseHandlingStatus[Raw] +} + +object ExceptionHandler { + def default[Raw]: ExceptionHandler[Raw] = (ctx: ExceptionContext, jsonSupport: JsonSupport[Raw]) => { + ctx.request.id match { + case Some(id) => + ResponseHandlingStatus.Handled( + ServerResponse + .ServerFailure( + jsonSupport.encodeResponse(JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(ServerInterpreter.InternalError), id)) + ) + .some + ) + case None => ResponseHandlingStatus.Handled(none) + } + } +} diff --git a/server/src/io/iohk/armadillo/server/Interceptor.scala b/server/src/io/iohk/armadillo/server/Interceptor.scala new file mode 100644 index 0000000..3c8ccca --- /dev/null +++ b/server/src/io/iohk/armadillo/server/Interceptor.scala @@ -0,0 +1,56 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.{JsonRpcEndpoint, JsonRpcError, JsonRpcId, JsonRpcServerEndpoint} + +trait Interceptor[F[_], Raw] + +trait RequestInterceptor[F[_], Raw] extends Interceptor[F, Raw] { + def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + requestHandler: MethodInterceptor[F, Raw] => RequestHandler[F, Raw] + ): RequestHandler[F, Raw] +} + +trait MethodOrEndpointInterceptor[F[_], Raw] extends Interceptor[F, Raw] + +trait MethodInterceptor[F[_], Raw] extends MethodOrEndpointInterceptor[F, Raw] { + def apply( + responder: Responder[F, Raw], + 
jsonSupport: JsonSupport[Raw], + methodHandler: EndpointInterceptor[F, Raw] => MethodHandler[F, Raw] + ): MethodHandler[F, Raw] +} + +object MethodInterceptor { + def noop[F[_], Raw](handler: EndpointInterceptor[F, Raw] = EndpointInterceptor.noop[F, Raw]): MethodInterceptor[F, Raw] = + (_: Responder[F, Raw], _: JsonSupport[Raw], methodHandler: EndpointInterceptor[F, Raw] => MethodHandler[F, Raw]) => { + methodHandler(handler) + } +} + +trait EndpointInterceptor[F[_], Raw] extends MethodOrEndpointInterceptor[F, Raw] { + def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + endpointHandler: EndpointHandler[F, Raw] + ): EndpointHandler[F, Raw] +} + +object EndpointInterceptor { + def noop[F[_], Raw]: EndpointInterceptor[F, Raw] = + (_: Responder[F, Raw], _: JsonSupport[Raw], endpointHandler: EndpointHandler[F, Raw]) => endpointHandler +} + +trait Responder[F[_], Raw] { + def apply[E, O]( + result: Either[E, O], + endpoint: JsonRpcEndpoint[_, E, O], + requestId: Option[JsonRpcId] + ): F[ResponseHandlingStatus[Raw]] +} + +object Responder { + case class TypedOutput[F[_], E, O](endpoint: JsonRpcServerEndpoint.Full[_, E, O, F], output: Either[JsonRpcError[E], O]) +} diff --git a/server/src/io/iohk/armadillo/server/InvalidRequestHandler.scala b/server/src/io/iohk/armadillo/server/InvalidRequestHandler.scala new file mode 100644 index 0000000..722752e --- /dev/null +++ b/server/src/io/iohk/armadillo/server/InvalidRequestHandler.scala @@ -0,0 +1,22 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.JsonRpcResponse +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import sttp.tapir.DecodeResult + +trait InvalidRequestHandler[Raw] { + def apply(request: Json[Raw], failure: DecodeResult.Failure, jsonSupport: JsonSupport[Raw]): ResponseHandlingStatus[Raw] +} + +object InvalidRequestHandler { + def default[Raw]: InvalidRequestHandler[Raw] = (_: Json[Raw], _: DecodeResult.Failure, jsonSupport: JsonSupport[Raw]) => { + ResponseHandlingStatus.Handled( + Some( + ServerResponse.Failure( + jsonSupport.encodeResponse(JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(ServerInterpreter.InvalidRequest))) + ) + ) + ) + } +} diff --git a/server/src/io/iohk/armadillo/server/InvalidRequestMethodInterceptor.scala b/server/src/io/iohk/armadillo/server/InvalidRequestMethodInterceptor.scala new file mode 100644 index 0000000..33712d2 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/InvalidRequestMethodInterceptor.scala @@ -0,0 +1,32 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import sttp.monad.MonadError +import sttp.monad.syntax._ + +class InvalidRequestMethodInterceptor[F[_], Raw](invalidRequestHandler: InvalidRequestHandler[Raw]) extends MethodInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + methodHandler: EndpointInterceptor[F, Raw] => MethodHandler[F, Raw] + ): MethodHandler[F, Raw] = { + val next = methodHandler(EndpointInterceptor.noop) + new MethodHandler[F, Raw] { + override def onDecodeSuccess[I]( + ctx: MethodHandler.DecodeSuccessContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeSuccess(ctx) + } + + override def onDecodeFailure( + ctx: MethodHandler.DecodeFailureContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeFailure(ctx).map { + case 
ResponseHandlingStatus.Unhandled => + invalidRequestHandler(ctx.request, ctx.f, jsonSupport) + case actionTaken => actionTaken + } + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/InvalidRequestStructureInterceptor.scala b/server/src/io/iohk/armadillo/server/InvalidRequestStructureInterceptor.scala new file mode 100644 index 0000000..b2bb8f0 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/InvalidRequestStructureInterceptor.scala @@ -0,0 +1,34 @@ +package io.iohk.armadillo.server + +import cats.syntax.all._ +import io.iohk.armadillo.JsonRpcResponse +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import sttp.monad.MonadError + +class InvalidRequestStructureInterceptor[F[_], Raw] extends RequestInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + requestHandler: MethodInterceptor[F, Raw] => RequestHandler[F, Raw] + ): RequestHandler[F, Raw] = { + val next = requestHandler(MethodInterceptor.noop[F, Raw]()) + new RequestHandler[F, Raw] { + override def onDecodeSuccess(request: JsonSupport.Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + request match { + case obj: Json.JsonObject[Raw] => next.onDecodeSuccess(obj) + case arr: Json.JsonArray[Raw] => next.onDecodeSuccess(arr) // TODO add test + case Json.Other(_) => + val response = JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(ServerInterpreter.InvalidRequest)) + monad.unit(ResponseHandlingStatus.Handled(ServerResponse.Failure(jsonSupport.encodeResponse(response)).some)) + } + } + + override def onDecodeFailure( + ctx: RequestHandler.DecodeFailureContext + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeFailure(ctx) + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/JsonSupport.scala b/server/src/io/iohk/armadillo/server/JsonSupport.scala new file mode 100644 index 0000000..c089f51 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/JsonSupport.scala @@ -0,0 +1,32 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo._ +import io.iohk.armadillo.server.JsonSupport.Json +import sttp.tapir.DecodeResult + +trait JsonSupport[Raw] { + def encodeErrorNoData(error: JsonRpcError.NoData): Raw + def encodeErrorWithData(error: JsonRpcError[Raw]): Raw + + def encodeResponse(e: JsonRpcResponse[Raw]): Raw + + def parse(string: String): DecodeResult[Json[Raw]] + def stringify(raw: Raw): String + + def materialize(raw: Raw): Json[Raw] + def demateralize(json: Json[Raw]): Raw + + def decodeJsonRpcRequest(raw: Json.JsonObject[Raw]): DecodeResult[JsonRpcRequest[Json[Raw]]] + + def asArray(seq: Seq[Raw]): Raw + def jsNull: Raw +} + +object JsonSupport { + sealed trait Json[Raw] + object Json { + case class JsonObject[Raw](fields: List[(String, Raw)]) extends Json[Raw] + case class JsonArray[Raw](values: Vector[Raw]) extends Json[Raw] + case class Other[Raw](raw: Raw) extends Json[Raw] + } +} diff --git a/server/src/io/iohk/armadillo/server/LoggingEndpointInterceptor.scala b/server/src/io/iohk/armadillo/server/LoggingEndpointInterceptor.scala new file mode 100644 index 0000000..0fce26a --- /dev/null +++ b/server/src/io/iohk/armadillo/server/LoggingEndpointInterceptor.scala @@ -0,0 +1,45 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.{JsonRpcRequest, 
JsonRpcServerEndpoint} +import sttp.monad.MonadError +import sttp.monad.syntax._ + +class LoggingEndpointInterceptor[F[_], Raw](serverLog: ServerLog[F, Raw]) extends EndpointInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + endpointHandler: EndpointHandler[F, Raw] + ): EndpointHandler[F, Raw] = { + new EndpointHandler[F, Raw] { + override def onDecodeSuccess[I, E, O]( + ctx: EndpointHandler.DecodeSuccessContext[F, I, E, O, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + endpointHandler + .onDecodeSuccess(ctx) + .flatTap(response => serverLog.requestHandled(ctx, response)) + .handleError { case e: Throwable => + serverLog.exception(ctx.endpoint, ctx.request, e).flatMap(_ => monad.error(e)) + } + } + + override def onDecodeFailure( + ctx: EndpointHandler.DecodeFailureContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + endpointHandler + .onDecodeFailure(ctx) + .flatTap(response => serverLog.decodeFailure(ctx, response)) + .handleError { case e: Throwable => + serverLog.exception(ctx.endpoint, ctx.request, e).flatMap(_ => monad.error(e)) + } + } + } + } +} + +trait ServerLog[F[_], Raw] { + def requestHandled(ctx: EndpointHandler.DecodeSuccessContext[F, _, _, _, Raw], response: ResponseHandlingStatus[Raw]): F[Unit] + def exception(endpoint: JsonRpcServerEndpoint[F], request: JsonRpcRequest[Json[Raw]], e: Throwable): F[Unit] + def decodeFailure(ctx: EndpointHandler.DecodeFailureContext[F, Raw], response: ResponseHandlingStatus[Raw]): F[Unit] +} diff --git a/server/src/io/iohk/armadillo/server/MethodHandler.scala b/server/src/io/iohk/armadillo/server/MethodHandler.scala new file mode 100644 index 0000000..bc6e699 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/MethodHandler.scala @@ -0,0 +1,41 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.MethodHandler.{DecodeFailureContext, DecodeSuccessContext} +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import io.iohk.armadillo.{JsonRpcRequest, JsonRpcServerEndpoint} +import sttp.monad.MonadError +import sttp.tapir.DecodeResult + +trait MethodHandler[F[_], Raw] { + def onDecodeSuccess[I](ctx: DecodeSuccessContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] + def onDecodeFailure(ctx: DecodeFailureContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] +} + +object MethodHandler { + case class DecodeSuccessContext[F[_], Raw]( + endpoints: List[JsonRpcServerEndpoint[F]], + request: JsonRpcRequest[Json[Raw]] + ) + + case class DecodeFailureContext[F[_], Raw]( + endpoints: List[JsonRpcServerEndpoint[F]], + request: Json[Raw], + f: DecodeResult.Failure + ) + + def apply[F[_], Raw]( + onSuccess: DecodeSuccessContext[F, Raw] => F[ResponseHandlingStatus[Raw]], + onError: DecodeFailureContext[F, Raw] => F[ResponseHandlingStatus[Raw]] + ): MethodHandler[F, Raw] = { + new MethodHandler[F, Raw] { + override def onDecodeSuccess[I](ctx: DecodeSuccessContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + onSuccess(ctx) + } + + override def onDecodeFailure(ctx: DecodeFailureContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + onError(ctx) + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/MethodNotFoundHandler.scala b/server/src/io/iohk/armadillo/server/MethodNotFoundHandler.scala new file mode 100644 index 
0000000..5fec09d --- /dev/null +++ b/server/src/io/iohk/armadillo/server/MethodNotFoundHandler.scala @@ -0,0 +1,27 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter.{ResponseHandlingStatus, ServerResponse} +import io.iohk.armadillo.{JsonRpcRequest, JsonRpcResponse} + +trait MethodNotFoundHandler[Raw] { + def apply(request: JsonRpcRequest[Json[Raw]], jsonSupport: JsonSupport[Raw]): ResponseHandlingStatus[Raw] +} + +object MethodNotFoundHandler { + def default[Raw]: MethodNotFoundHandler[Raw] = (request: JsonRpcRequest[Json[Raw]], jsonSupport: JsonSupport[Raw]) => { + request.id match { + case Some(id) => + ResponseHandlingStatus.Handled( + Some( + ServerResponse.Failure( + jsonSupport.encodeResponse( + JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(ServerInterpreter.MethodNotFound), id) + ) + ) + ) + ) + case None => ResponseHandlingStatus.Handled(None) + } + } +} diff --git a/server/src/io/iohk/armadillo/server/MethodNotFoundInterceptor.scala b/server/src/io/iohk/armadillo/server/MethodNotFoundInterceptor.scala new file mode 100644 index 0000000..51e18fd --- /dev/null +++ b/server/src/io/iohk/armadillo/server/MethodNotFoundInterceptor.scala @@ -0,0 +1,32 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import sttp.monad.MonadError +import sttp.monad.syntax._ + +class MethodNotFoundInterceptor[F[_], Raw](methodNotFoundHandler: MethodNotFoundHandler[Raw]) extends MethodInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + methodHandler: EndpointInterceptor[F, Raw] => MethodHandler[F, Raw] + ): MethodHandler[F, Raw] = { + val next = methodHandler(EndpointInterceptor.noop) + new MethodHandler[F, Raw] { + override def onDecodeSuccess[I]( + ctx: MethodHandler.DecodeSuccessContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeSuccess(ctx).map { + case ResponseHandlingStatus.Unhandled => + methodNotFoundHandler(ctx.request, jsonSupport) + case handled => handled + } + } + + override def onDecodeFailure( + ctx: MethodHandler.DecodeFailureContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + next.onDecodeFailure(ctx) + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/OverrideInterceptor.scala b/server/src/io/iohk/armadillo/server/OverrideInterceptor.scala new file mode 100644 index 0000000..a23c993 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/OverrideInterceptor.scala @@ -0,0 +1,108 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.EndpointOverride.Full +import io.iohk.armadillo.{JsonRpcEndpoint, JsonRpcServerEndpoint} +import sttp.monad.MonadError + +class OverrideInterceptor[F[_], Raw](overriddenEndpoints: List[EndpointOverride[F]]) extends EndpointInterceptor[F, Raw] { + override def apply( + responder: Responder[F, Raw], + jsonSupport: JsonSupport[Raw], + endpointHandler: EndpointHandler[F, Raw] + ): EndpointHandler[F, Raw] = { + new EndpointHandler[F, Raw] { + override def onDecodeSuccess[I, E, O](ctx: EndpointHandler.DecodeSuccessContext[F, I, E, O, Raw])(implicit + monad: MonadError[F] + ): F[ServerInterpreter.ResponseHandlingStatus[Raw]] = { + overriddenEndpoints.find(_.endpoint.methodName == ctx.endpoint.endpoint.methodName) match { + case Some(oe) => + monad.flatMap(oe.asInstanceOf[Full[I, E, O, F]].logic(monad, ctx.input, ctx.endpoint))(s => + 
responder.apply(s, ctx.endpoint.endpoint, ctx.request.id) + ) + + case None => endpointHandler.onDecodeSuccess(ctx) + } + } + + override def onDecodeFailure(ctx: EndpointHandler.DecodeFailureContext[F, Raw])(implicit + monad: MonadError[F] + ): F[ServerInterpreter.ResponseHandlingStatus[Raw]] = endpointHandler.onDecodeFailure(ctx) + } + } +} + +case class OverridingEndpoint[I, E, O](e: JsonRpcEndpoint[I, E, O]) { + def replaceLogic[F[_]](logic: MonadError[F] => I => F[Either[E, O]]): EndpointOverride[F] = { + EndpointOverride[I, E, O, F]( + e, + { case (m, i, _) => + logic(m)(i) + } + ) + } + + def thenReturn[F[_]](o: F[Either[E, O]]): EndpointOverride[F] = { + EndpointOverride[I, E, O, F]( + e, + { case (_, _, _) => + o + } + ) + } + + def runBeforeLogic[F[_]](f: => F[Unit]): EndpointOverride[F] = { + EndpointOverride[I, E, O, F]( + e, + { case (m, i, se) => + m.flatMap(m.suspend(f))(_ => se.logic(m)(i)) + } + ) + } + + def runAfterLogic[F[_]](f: => F[Unit]): EndpointOverride[F] = { + EndpointOverride[I, E, O, F]( + e, + { case (m, i, se) => + m.flatMap(se.logic(m)(i))(output => m.map(m.suspend(f))(_ => output)) + } + ) + } +} + +abstract class EndpointOverride[F[_]]() { + type INPUT + type OUTPUT + type ERROR_OUTPUT + + def endpoint: JsonRpcEndpoint[INPUT, ERROR_OUTPUT, OUTPUT] + def logic: (MonadError[F], INPUT, JsonRpcServerEndpoint.Full[INPUT, ERROR_OUTPUT, OUTPUT, F]) => F[Either[ERROR_OUTPUT, OUTPUT]] +} + +object EndpointOverride { + type Full[_INPUT, _ERROR_OUTPUT, _OUTPUT, F[_]] = EndpointOverride[F] { + type INPUT = _INPUT + type ERROR_OUTPUT = _ERROR_OUTPUT + type OUTPUT = _OUTPUT + } + + def apply[INPUT, ERROR_OUTPUT, OUTPUT, F[_]]( + endpoint: JsonRpcEndpoint[INPUT, ERROR_OUTPUT, OUTPUT], + logic: (MonadError[F], INPUT, JsonRpcServerEndpoint.Full[INPUT, ERROR_OUTPUT, OUTPUT, F]) => F[Either[ERROR_OUTPUT, OUTPUT]] + ): EndpointOverride.Full[INPUT, ERROR_OUTPUT, OUTPUT, F] = { + type _INPUT = INPUT + type _ERROR_OUTPUT = ERROR_OUTPUT + type _OUTPUT = OUTPUT + val f = logic + val _endpoint = endpoint + new EndpointOverride[F] { + override type INPUT = _INPUT + override type ERROR_OUTPUT = _ERROR_OUTPUT + override type OUTPUT = _OUTPUT + + override def endpoint: JsonRpcEndpoint[_INPUT, _ERROR_OUTPUT, _OUTPUT] = _endpoint + + override def logic + : (MonadError[F], INPUT, JsonRpcServerEndpoint.Full[INPUT, ERROR_OUTPUT, OUTPUT, F]) => F[Either[ERROR_OUTPUT, OUTPUT]] = f + } + } +} diff --git a/server/src/io/iohk/armadillo/server/RequestHandler.scala b/server/src/io/iohk/armadillo/server/RequestHandler.scala new file mode 100644 index 0000000..b9ce7a2 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/RequestHandler.scala @@ -0,0 +1,35 @@ +package io.iohk.armadillo.server + +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.RequestHandler.DecodeFailureContext +import io.iohk.armadillo.server.ServerInterpreter.ResponseHandlingStatus +import sttp.monad.MonadError +import sttp.tapir.DecodeResult + +trait RequestHandler[F[_], Raw] { + def onDecodeSuccess(request: Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] + + def onDecodeFailure(ctx: DecodeFailureContext)(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] +} + +object RequestHandler { + case class DecodeFailureContext(failure: DecodeResult.Failure, request: String) + + def apply[F[_], Raw]( + onSuccess: Json[Raw] => F[ResponseHandlingStatus[Raw]], + onError: RequestHandler.DecodeFailureContext => F[ResponseHandlingStatus[Raw]] + ): RequestHandler[F, Raw] = { 
+ new RequestHandler[F, Raw] { + override def onDecodeSuccess(request: Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + onSuccess(request) + } + + override def onDecodeFailure( + ctx: RequestHandler.DecodeFailureContext + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + onError(ctx) + } + } + } + +} diff --git a/server/src/io/iohk/armadillo/server/ServerInterpreter.scala b/server/src/io/iohk/armadillo/server/ServerInterpreter.scala new file mode 100644 index 0000000..52ed35d --- /dev/null +++ b/server/src/io/iohk/armadillo/server/ServerInterpreter.scala @@ -0,0 +1,405 @@ +package io.iohk.armadillo.server + +import cats.syntax.all._ +import io.iohk.armadillo.JsonRpcError.NoData +import io.iohk.armadillo.server.EndpointHandler.{DecodeFailureContext, DecodeSuccessContext} +import io.iohk.armadillo.server.JsonSupport.Json +import io.iohk.armadillo.server.ServerInterpreter._ +import io.iohk.armadillo.server.Utils.RichEndpointInput +import io.iohk.armadillo.{JsonRpcErrorOutput, _} +import sttp.monad.MonadError +import sttp.monad.syntax._ +import sttp.tapir.internal.ParamsAsVector +import sttp.tapir.{DecodeResult, ValidationError} + +import scala.annotation.tailrec + +class ServerInterpreter[F[_], Raw] private ( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]], + jsonSupport: JsonSupport[Raw], + interceptors: List[Interceptor[F, Raw]] +)(implicit + monadError: MonadError[F] +) { + + def dispatchRequest(stringRequest: String): F[Option[ServerResponse[Raw]]] = { + jsonSupport.parse(stringRequest) match { + case f: DecodeResult.Failure => + monadError.suspend( + callRequestInterceptors(interceptors, Nil, defaultResponder) + .onDecodeFailure(RequestHandler.DecodeFailureContext(f, stringRequest)) + .map { + case ResponseHandlingStatus.Handled(response) => response + case ResponseHandlingStatus.Unhandled => throw new RuntimeException(s"Unhandled request: $stringRequest") + } + ) + case DecodeResult.Value(jsonRequest) => + monadError.suspend( + callRequestInterceptors(interceptors, Nil, defaultResponder) + .onDecodeSuccess(jsonRequest) + .map { + case ResponseHandlingStatus.Handled(response) => response + case ResponseHandlingStatus.Unhandled => throw new RuntimeException(s"Unhandled request: $stringRequest") + } + ) + } + } + + private def callRequestInterceptors( + is: List[Interceptor[F, Raw]], + eisAcc: List[MethodOrEndpointInterceptor[F, Raw]], + responder: Responder[F, Raw] + ): RequestHandler[F, Raw] = { + is match { + case Nil => defaultRequestHandler(eisAcc.reverse) + case (ri: RequestInterceptor[F, Raw]) :: tail => + ri.apply( + responder, + jsonSupport, + { ei => + RequestHandler( + onSuccess = callRequestInterceptors(tail, ei :: eisAcc, responder).onDecodeSuccess, + callRequestInterceptors(tail, ei :: eisAcc, responder).onDecodeFailure + ) + } + ) + case (ei: MethodInterceptor[F, Raw]) :: tail => callRequestInterceptors(tail, ei :: eisAcc, responder) + case (ei: EndpointInterceptor[F, Raw]) :: tail => callRequestInterceptors(tail, ei :: eisAcc, responder) + case other => + throw new IllegalArgumentException(s"Unsupported interceptor! 
$other") + } + } + + private def defaultRequestHandler(eis: List[MethodOrEndpointInterceptor[F, Raw]]): RequestHandler[F, Raw] = { + new RequestHandler[F, Raw] { + override def onDecodeSuccess(request: Json[Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + request match { + case obj: Json.JsonObject[Raw] => handleObject(jsonRpcEndpoints, obj, eis) + case _ => monad.unit(ResponseHandlingStatus.Unhandled) + } + } + + override def onDecodeFailure( + ctx: RequestHandler.DecodeFailureContext + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + monad.unit(ResponseHandlingStatus.Unhandled) + } + } + } + + private def defaultMethodHandler(eis: List[EndpointInterceptor[F, Raw]]): MethodHandler[F, Raw] = { + new MethodHandler[F, Raw] { + override def onDecodeSuccess[I]( + ctx: MethodHandler.DecodeSuccessContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + ctx.endpoints.find(_.endpoint.methodName.asString == ctx.request.method) match { + case None => monadError.unit(ResponseHandlingStatus.Unhandled) + case Some(value) => handleObjectWithEndpoint(value, ctx.request, eis) + } + } + + override def onDecodeFailure( + ctx: MethodHandler.DecodeFailureContext[F, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + monadError.unit(ResponseHandlingStatus.Unhandled) + } + } + } + + private def handleObject( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]], + obj: Json.JsonObject[Raw], + eis: List[MethodOrEndpointInterceptor[F, Raw]] + ): F[ResponseHandlingStatus[Raw]] = { + jsonSupport.decodeJsonRpcRequest(obj) match { + case failure: DecodeResult.Failure => + val ctx = MethodHandler.DecodeFailureContext(jsonRpcEndpoints, obj, failure) + monadError.suspend(callMethodOrEndpointInterceptors(eis, Nil, defaultResponder).onDecodeFailure(ctx)) + case DecodeResult.Value(v) => + val ctx = MethodHandler.DecodeSuccessContext(jsonRpcEndpoints, v) + monadError.suspend(callMethodOrEndpointInterceptors(eis, Nil, defaultResponder).onDecodeSuccess(ctx)) + } + } + + private def callMethodOrEndpointInterceptors( + is: List[MethodOrEndpointInterceptor[F, Raw]], + eisAcc: List[EndpointInterceptor[F, Raw]], + responder: Responder[F, Raw] + ): MethodHandler[F, Raw] = { + is match { + case Nil => defaultMethodHandler(eisAcc.reverse) + case (ri: MethodInterceptor[F, Raw]) :: tail => + ri.apply( + responder, + jsonSupport, + { ei => + MethodHandler( + onSuccess = callMethodOrEndpointInterceptors(tail, ei :: eisAcc, responder).onDecodeSuccess, + callMethodOrEndpointInterceptors(tail, ei :: eisAcc, responder).onDecodeFailure + ) + } + ) + case (ei: EndpointInterceptor[F, Raw]) :: tail => callMethodOrEndpointInterceptors(tail, ei :: eisAcc, responder) + case other => + throw new IllegalArgumentException(s"Unsupported interceptor! 
$other") + } + } + + private def handleObjectWithEndpoint( + se: JsonRpcServerEndpoint[F], + request: JsonRpcRequest[Json[Raw]], + eis: List[EndpointInterceptor[F, Raw]] + ): F[ResponseHandlingStatus[Raw]] = { + val handler = eis.foldRight(defaultEndpointHandler(defaultResponder, jsonSupport)) { case (interceptor, handler) => + interceptor.apply(defaultResponder, jsonSupport, handler) + } + decodeJsonRpcParamsForEndpoint(se.endpoint, request.params) match { + case e: DecodeResult.Failure => handler.onDecodeFailure(DecodeFailureContext(se, request, e)) + case DecodeResult.Value(params) => + val matchedBody = params.asAny.asInstanceOf[se.INPUT] + onDecodeSuccess[se.INPUT, se.ERROR_OUTPUT, se.OUTPUT](se, request, handler, matchedBody) + } + } + + private def onDecodeSuccess[I, E, O]( + serverEndpoint: JsonRpcServerEndpoint.Full[I, E, O, F], + request: JsonRpcRequest[Json[Raw]], + handler: EndpointHandler[F, Raw], + matchedBody: I + ): F[ResponseHandlingStatus[Raw]] = { + handler.onDecodeSuccess[serverEndpoint.INPUT, serverEndpoint.ERROR_OUTPUT, serverEndpoint.OUTPUT]( + DecodeSuccessContext[F, serverEndpoint.INPUT, serverEndpoint.ERROR_OUTPUT, serverEndpoint.OUTPUT, Raw]( + serverEndpoint, + request, + matchedBody + ) + ) + } + + private val defaultResponder: Responder[F, Raw] = new DefaultResponder + + class DefaultResponder extends Responder[F, Raw] { + def apply[E, O]( + result: Either[E, O], + endpoint: JsonRpcEndpoint[_, E, O], + requestId: Option[JsonRpcId] + ): F[ResponseHandlingStatus[Raw]] = { + wrap(result match { + case Left(value) => + val encodedError = handleErrorReturnType(jsonSupport)(value, endpoint.error) + requestId.map(id => JsonRpcErrorResponse("2.0", encodedError, Some(id))) + case Right(value) => + val encodedOutput = endpoint.output match { + case _: JsonRpcIO.Empty[O] => jsonSupport.jsNull + case o: JsonRpcIO.Single[O] => o.codec.encode(value).asInstanceOf[Raw] + } + requestId.map(JsonRpcSuccessResponse("2.0", encodedOutput, _)) + }) + } + + private def wrap(response: Option[JsonRpcResponse[Raw]]): F[ResponseHandlingStatus[Raw]] = { + val serverResponse = response match { + case Some(value) => + value match { + case successResponse @ JsonRpcSuccessResponse(_, _, _) => + ServerResponse.Success(jsonSupport.encodeResponse(successResponse)).some + case errorResponse @ JsonRpcErrorResponse(_, _, _) => + ServerResponse.Failure(jsonSupport.encodeResponse(errorResponse)).some + } + case None => None + } + monadError.unit(ResponseHandlingStatus.Handled(serverResponse)) + } + + @tailrec + private def handleErrorReturnType[I, E, O](jsonSupport: JsonSupport[Raw])( + value: E, + errorOutput: JsonRpcErrorOutput[E] + ): Raw = { + errorOutput match { + case _: JsonRpcErrorOutput.SingleNoData => + jsonSupport.encodeErrorNoData(value.asInstanceOf[NoData]) + case single: JsonRpcErrorOutput.SingleWithData[E] => + single.codec.encode(value.asInstanceOf[single.DATA]).asInstanceOf[Raw] + case JsonRpcErrorOutput.Fixed(code, message) => + jsonSupport.encodeErrorNoData(JsonRpcError.noData(code, message)) + case _: JsonRpcErrorOutput.Empty => + jsonSupport.jsNull + case JsonRpcErrorOutput.FixedWithData(code, message, codec) => + val encodedData = codec.encode(value).asInstanceOf[Raw] + jsonSupport.encodeErrorWithData(JsonRpcError.withData(code, message, encodedData)) + case JsonRpcErrorOutput.OneOf(variants, _) => + variants.find(v => v.appliesTo(value)) match { + case Some(matchedVariant) => + handleErrorReturnType(jsonSupport)( + value, + 
matchedVariant.output.asInstanceOf[JsonRpcErrorOutput[E]] + ) + case None => throw new IllegalArgumentException(s"OneOf variant not matched. Variants: $variants, value: $value") + } + } + } + } + + private def decodeJsonRpcParamsForEndpoint( + jsonRpcEndpoint: JsonRpcEndpoint[_, _, _], + maybeJsonParams: Option[Json[Raw]] + ): DecodeResult[ParamsAsVector] = { + maybeJsonParams match { + case Some(jsonParams) => + val result = jsonParams match { + case obj: Json.JsonObject[Raw] => + val objectCombinator = combineDecodeAsObject(jsonRpcEndpoint.input.asVectorOfBasicInputs) + jsonRpcEndpoint.paramStructure match { + case ParamStructure.Either => objectCombinator(obj) + case ParamStructure.ByName => objectCombinator(obj) + case ParamStructure.ByPosition => DecodeResult.Mismatch("json object", jsonSupport.stringify(jsonSupport.demateralize(obj))) + } + case arr: Json.JsonArray[Raw] => + val vectorCombinator = combineDecodeAsVector(jsonRpcEndpoint.input.asVectorOfBasicInputs) + jsonRpcEndpoint.paramStructure match { + case ParamStructure.Either => vectorCombinator(arr) + case ParamStructure.ByPosition => vectorCombinator(arr) + case ParamStructure.ByName => DecodeResult.Mismatch("json object", jsonSupport.stringify(jsonSupport.demateralize(arr))) + } + case Json.Other(raw) => DecodeResult.Mismatch("json array or json object", jsonSupport.stringify(raw)) + } + result.map(ParamsAsVector) + case None => DecodeResult.Value(ParamsAsVector(Vector.empty)) + } + } + + private def combineDecodeAsVector(in: Vector[JsonRpcIO.Single[_]]): Json.JsonArray[Raw] => DecodeResult[Vector[_]] = { json => + case class State(results: List[DecodeResult[_]], paramsToProcess: List[Raw]) + val ss = in.foldLeft(State(List.empty, json.values.toList)) { (acc, input) => + val codec = input.codec.asInstanceOf[JsonRpcCodec[Any]] + acc.paramsToProcess match { + case currentParam :: restOfParams => + val decoded = codec.decode(currentParam.asInstanceOf[codec.L]) + val validated = decoded.flatMap(validate(_, codec.schema.applyValidation)) + validated match { + case _: DecodeResult.Failure if codec.schema.isOptional => + acc.copy(results = acc.results :+ DecodeResult.Value(None)) + case other => State(acc.results :+ other, restOfParams) + } + case Nil if codec.schema.isOptional => acc.copy(results = acc.results :+ DecodeResult.Value(None)) + case Nil => acc.copy(results = acc.results :+ DecodeResult.Missing) + } + } + if (ss.paramsToProcess.isEmpty) { + DecodeResult.sequence(ss.results).map(_.toVector) + } else { + val msg = "Too many inputs provided" + DecodeResult.Error(msg, new RuntimeException(msg)) + } + } + + private def combineDecodeAsObject(in: Vector[JsonRpcIO.Single[_]]): Json.JsonObject[Raw] => DecodeResult[Vector[_]] = { json => + val jsonAsMap = json.fields.toMap + if (jsonAsMap.size >= in.count(_.codec.schema.isOptional) && jsonAsMap.size <= in.size) { + val ss = in.toList.map { case JsonRpcIO.Single(codec, _, name) => + jsonAsMap.get(name) match { + case Some(r) => + val decoded = codec.decode(r.asInstanceOf[codec.L]) + decoded.flatMap(validate(_, codec.schema.applyValidation)) + case None => + if (codec.schema.isOptional) { + DecodeResult.Value(None) + } else { + DecodeResult.Missing + } + } + } + DecodeResult.sequence(ss).map(_.toVector) + } else { + val msg = "Too many inputs provided" + DecodeResult.Error(msg, new RuntimeException(msg)) + } + } + + private def validate[T](v: T, applyValidation: T => List[ValidationError[_]]): DecodeResult[T] = { + val validationErrors = applyValidation(v) + if 
(validationErrors.isEmpty) { + DecodeResult.Value(v) + } else { + DecodeResult.InvalidValue(validationErrors) + } + } +} + +object ServerInterpreter { + val ParseError: JsonRpcError.NoData = JsonRpcError.noData(-32700, "Parse error") + val InvalidRequest: JsonRpcError.NoData = JsonRpcError.noData(-32600, "Invalid Request") + val MethodNotFound: JsonRpcError.NoData = JsonRpcError.noData(-32601, "Method not found") + val InvalidParams: JsonRpcError.NoData = JsonRpcError.noData(-32602, "Invalid params") + val InternalError: JsonRpcError.NoData = JsonRpcError.noData(-32603, "Internal error") + + def apply[F[_]: MonadError, Raw]( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]], + jsonSupport: JsonSupport[Raw], + interceptors: List[Interceptor[F, Raw]] + ): Either[InterpretationError, ServerInterpreter[F, Raw]] = { + val nonUniqueMethodNames = jsonRpcEndpoints.groupBy(_.endpoint.methodName).values.filter(_.size != 1).map(_.head.endpoint.methodName) + Either.cond( + nonUniqueMethodNames.isEmpty, + new ServerInterpreter(jsonRpcEndpoints, jsonSupport, interceptors), + InterpretationError.NonUniqueMethod(nonUniqueMethodNames.toList) + ) + } + + def applyUnsafe[F[_]: MonadError, Raw]( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]], + jsonSupport: JsonSupport[Raw], + interceptors: List[Interceptor[F, Raw]] + ): ServerInterpreter[F, Raw] = + apply(jsonRpcEndpoints, jsonSupport, interceptors).left + .map(e => throw new IllegalArgumentException(e.toString)) + .merge + + sealed trait ServerResponse[+Raw] { + def body: Raw + } + + object ServerResponse { + final case class Success[+Raw](body: Raw) extends ServerResponse[Raw] + final case class Failure[+Raw](body: Raw) extends ServerResponse[Raw] + final case class ServerFailure[+Raw](body: Raw) extends ServerResponse[Raw] + } + + sealed trait ResponseHandlingStatus[+Raw] + object ResponseHandlingStatus { + final case class Handled[+Raw](response: Option[ServerResponse[Raw]]) extends ResponseHandlingStatus[Raw] + case object Unhandled extends ResponseHandlingStatus[Nothing] + } + + sealed trait InterpretationError + object InterpretationError { + case class NonUniqueMethod(names: List[MethodName]) extends InterpretationError + } + + private def defaultEndpointHandler[F[_], Raw](responder: Responder[F, Raw], jsonSupport: JsonSupport[Raw]): EndpointHandler[F, Raw] = { + new EndpointHandler[F, Raw] { + override def onDecodeSuccess[I, E, O]( + ctx: DecodeSuccessContext[F, I, E, O, Raw] + )(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + ctx.endpoint + .logic(monad)(ctx.input) + .flatMap(r => responder.apply(r, ctx.endpoint.endpoint, ctx.request.id)) + } + + override def onDecodeFailure(ctx: DecodeFailureContext[F, Raw])(implicit monad: MonadError[F]): F[ResponseHandlingStatus[Raw]] = { + val result: ResponseHandlingStatus[Raw] = if (ctx.request.isNotification) { + ResponseHandlingStatus.Handled(none) + } else { + ResponseHandlingStatus.Handled(ServerResponse.Failure(createErrorResponse(InvalidParams, ctx.request.id)).some) + } + monad.unit(result) + } + + private def createErrorResponse(error: JsonRpcError.NoData, id: Option[JsonRpcId]): Raw = { + jsonSupport.encodeResponse(JsonRpcResponse.error_v2(jsonSupport.encodeErrorNoData(error), id)) + } + } + } + +} diff --git a/server/src/io/iohk/armadillo/server/Utils.scala b/server/src/io/iohk/armadillo/server/Utils.scala new file mode 100644 index 0000000..c971e1e --- /dev/null +++ b/server/src/io/iohk/armadillo/server/Utils.scala @@ -0,0 +1,56 @@ +package io.iohk.armadillo.server + 
+import io.iohk.armadillo.{JsonRpcIO, JsonRpcInput} +import sttp.monad.MonadError +import sttp.monad.syntax._ +import sttp.tapir.DecodeResult + +object Utils { + implicit class RichEndpointInput[I](input: JsonRpcInput[I]) { + def traverseInputs[T](handle: PartialFunction[JsonRpcInput[_], Vector[T]]): Vector[T] = + input match { + case i: JsonRpcInput[_] if handle.isDefinedAt(i) => handle(i) + case JsonRpcInput.Pair(left, right) => left.traverseInputs(handle) ++ right.traverseInputs(handle) + case _ => Vector.empty + } + + def asVectorOfBasicInputs: Vector[JsonRpcIO.Single[_]] = + traverseInputs { case b: JsonRpcIO.Single[_] => + Vector(b) + } + } + + implicit class RichDecodeResult[T](decodeResult: DecodeResult[T]) { + def orElse(other: => DecodeResult[T]): DecodeResult[T] = { + decodeResult match { + case firstFailure: DecodeResult.Failure => + other match { + case secondFailure: DecodeResult.Failure => DecodeResult.Multiple(Seq(firstFailure, secondFailure)) + case success: DecodeResult.Value[T] => success + } + case success: DecodeResult.Value[T] => success + } + } + + def fold[R](success: T => R, error: DecodeResult.Failure => R): R = { + decodeResult match { + case failure: DecodeResult.Failure => error(failure) + case DecodeResult.Value(v) => success(v) + } + } + + def map2[B, C](fb: DecodeResult[B])(f: (T, B) => C): DecodeResult[C] = { + decodeResult.flatMap { a => + fb.map(b => f(a, b)) + } + } + } + + implicit class RichMonadErrorOps[F[_]: MonadError, A](fa: F[A]) { + def map2[B, C](fb: F[B])(f: (A, B) => C): F[C] = { + fa.flatMap { a => + fb.map(b => f(a, b)) + } + } + } +} diff --git a/server/src/io/iohk/armadillo/server/package.scala b/server/src/io/iohk/armadillo/server/package.scala new file mode 100644 index 0000000..55f4440 --- /dev/null +++ b/server/src/io/iohk/armadillo/server/package.scala @@ -0,0 +1,10 @@ +package io.iohk.armadillo + +package object server { + implicit class EndpointOps[I, E, O](jsonRpcEndpoint: JsonRpcEndpoint[I, E, O]) { + def `override`: OverridingEndpoint[I, E, O] = OverridingEndpoint(jsonRpcEndpoint) + } + implicit class ServerEndpointOps[I, E, O, F[_]](jsonRpcServerEndpoint: JsonRpcServerEndpoint.Full[I, E, O, F]) { + def `override`: OverridingEndpoint[I, E, O] = jsonRpcServerEndpoint.endpoint.`override` + } +} diff --git a/server/stub/src/io/iohk/armadillo/server/stub/ArmadilloStubInterpreter.scala b/server/stub/src/io/iohk/armadillo/server/stub/ArmadilloStubInterpreter.scala new file mode 100644 index 0000000..8e6d9d6 --- /dev/null +++ b/server/stub/src/io/iohk/armadillo/server/stub/ArmadilloStubInterpreter.scala @@ -0,0 +1,114 @@ +package io.iohk.armadillo.server.stub + +import io.iohk.armadillo.JsonRpcServerEndpoint.Full +import io.iohk.armadillo.server.{CustomInterceptors, Interceptor, JsonSupport} +import io.iohk.armadillo.{JsonRpcEndpoint, JsonRpcServerEndpoint} +import sttp.client3.testing.SttpBackendStub +import sttp.client3.{Request, SttpBackend} +import sttp.monad.MonadError +import sttp.monad.syntax._ + +final case class InputCheck[T](f: T => Either[String, T]) { + def apply(input: T): Either[String, T] = f(input) +} + +object InputCheck { + def pass[T]: InputCheck[T] = InputCheck[T](i => Right(i)) + + def exact[T](expected: T): InputCheck[T] = + InputCheck[T](input => Either.cond(input == expected, input, s"Invalid input received $input, expected $expected")) +} + +class ArmadilloStubInterpreter[F[_], Raw, R] private ( + ses: List[JsonRpcServerEndpoint[F]], + interceptors: List[Interceptor[F, Raw]], + stub: SttpBackendStub[F, R], + 
jsonSupport: JsonSupport[Raw] +) { + + def whenServerEndpoint[I, E, O](se: JsonRpcServerEndpoint.Full[I, E, O, F]): ArmadilloServerEndpointStub[I, E, O] = { + ArmadilloServerEndpointStub(se, InputCheck.pass) + } + + def whenEndpoint[I, E, O](e: JsonRpcEndpoint[I, E, O]): ArmadilloEndpointStub[I, E, O] = { + ArmadilloEndpointStub(e, InputCheck.pass) + } + + final case class ArmadilloEndpointStub[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O], + inputCheck: InputCheck[I] + ) { + + def assertInputs(expectedInputs: I): ArmadilloEndpointStub[I, E, O] = + copy(inputCheck = InputCheck.exact(expectedInputs)) + def assertInputs(inputCheck: InputCheck[I]): ArmadilloEndpointStub[I, E, O] = + copy(inputCheck = inputCheck) + + def thenRespond(response: O): ArmadilloStubInterpreter[F, Raw, R] = + append(endpoint.serverLogic(_ => (Right(response): Either[E, O]).unit), inputCheck) + + def thenRespondError(error: E): ArmadilloStubInterpreter[F, Raw, R] = + append(endpoint.serverLogic(_ => (Left(error): Either[E, O]).unit), inputCheck) + + def thenThrowError(error: Throwable): ArmadilloStubInterpreter[F, Raw, R] = + append(endpoint.serverLogic(_ => throw error), inputCheck) + } + + final case class ArmadilloServerEndpointStub[I, E, O]( + se: JsonRpcServerEndpoint.Full[I, E, O, F], + inputCheck: InputCheck[I] + ) { + + def assertInputs(expectedInputs: I): ArmadilloServerEndpointStub[I, E, O] = + copy(inputCheck = InputCheck.exact(expectedInputs)) + def assertInputs(inputCheck: InputCheck[I]): ArmadilloServerEndpointStub[I, E, O] = + copy(inputCheck = inputCheck) + + def thenRespond(response: O): ArmadilloStubInterpreter[F, Raw, R] = + append(se.endpoint.serverLogic(_ => (Right(response): Either[E, O]).unit), inputCheck) + + def thenRespondError(error: E): ArmadilloStubInterpreter[F, Raw, R] = + append(se.endpoint.serverLogic(_ => (Left(error): Either[E, O]).unit), inputCheck) + + def thenThrowError(error: Throwable): ArmadilloStubInterpreter[F, Raw, R] = + append(se.endpoint.serverLogic(_ => throw error), inputCheck) + + def thenRunLogic(): ArmadilloStubInterpreter[F, Raw, R] = append(se, inputCheck) + + } + + private def asserEndpointInput[I, E, O](se: Full[I, E, O, F], inputCheck: InputCheck[I]): Full[I, E, O, F] = { + se.endpoint.serverLogic(i => monadErrorFromEither(inputCheck(i)).flatMap(i => se.logic(monad)(i))) + } + + private def append[I, E, O]( + se: JsonRpcServerEndpoint.Full[I, E, O, F], + inputCheck: InputCheck[I] + ): ArmadilloStubInterpreter[F, Raw, R] = { + new ArmadilloStubInterpreter[F, Raw, R]( + ses :+ asserEndpointInput(se, inputCheck), + interceptors, + stub, + jsonSupport + ) + } + + private implicit val monad: MonadError[F] = stub.responseMonad + + private def monadErrorFromEither[T](either: Either[String, T]) = either match { + case Left(value) => monad.error(new IllegalArgumentException(value)) + case Right(value) => monad.unit(value) + } + + def backend(): SttpBackend[F, R] = { + stub.whenAnyRequest.thenRespondF(req => + new StubServerInterpreter(ses, interceptors, jsonSupport, stub).apply(req.asInstanceOf[Request[Any, R]]) + ) + } +} + +object ArmadilloStubInterpreter { + + def apply[F[_], R, Raw](stub: SttpBackendStub[F, R], jsonSupport: JsonSupport[Raw]): ArmadilloStubInterpreter[F, Raw, R] = + new ArmadilloStubInterpreter[F, Raw, R](List.empty, CustomInterceptors().interceptors, stub, jsonSupport) +} diff --git a/server/stub/src/io/iohk/armadillo/server/stub/StubServerInterpreter.scala b/server/stub/src/io/iohk/armadillo/server/stub/StubServerInterpreter.scala new file mode 
100644 index 0000000..0d5f0ea --- /dev/null +++ b/server/stub/src/io/iohk/armadillo/server/stub/StubServerInterpreter.scala @@ -0,0 +1,29 @@ +package io.iohk.armadillo.server.stub + +import io.iohk.armadillo.JsonRpcServerEndpoint +import io.iohk.armadillo.server.tapir.TapirInterpreter +import io.iohk.armadillo.server.{Interceptor, JsonSupport} +import sttp.client3.testing.SttpBackendStub +import sttp.client3.{Request, Response} +import sttp.model.StatusCode +import sttp.monad.MonadError +import sttp.monad.syntax._ +import sttp.tapir.server.ServerEndpoint.Full +import sttp.tapir.server.stub.TapirStubInterpreter + +private[stub] class StubServerInterpreter[F[_]: MonadError, Raw, R]( + endpoints: List[JsonRpcServerEndpoint[F]], + interceptors: List[Interceptor[F, Raw]], + jsonSupport: JsonSupport[Raw], + backendStub: SttpBackendStub[F, R] +) { + def apply[T](req: Request[T, R]): F[Response[_]] = { + val tapirInterpreter = new TapirInterpreter[F, Raw](jsonSupport, interceptors) + val tapirEndpoint: Full[Unit, Unit, String, (Raw, StatusCode), (Option[Raw], StatusCode), Any, F] = tapirInterpreter + .toTapirEndpoint(endpoints) + .getOrElse(throw new RuntimeException("Error during conversion to tapir")) + val tapirStubInterpreter = + TapirStubInterpreter(backendStub).whenServerEndpoint(tapirEndpoint).thenRunLogic().backend() + tapirStubInterpreter.send(req).map(r => r.asInstanceOf[Response[Any]]) + } +} diff --git a/server/stub/test/src/io/iohk/armadillo/server/stub/StubServerInterpreterTest.scala b/server/stub/test/src/io/iohk/armadillo/server/stub/StubServerInterpreterTest.scala new file mode 100644 index 0000000..7d00fce --- /dev/null +++ b/server/stub/test/src/io/iohk/armadillo/server/stub/StubServerInterpreterTest.scala @@ -0,0 +1,171 @@ +package io.iohk.armadillo.server.stub + +import cats.effect.IO +import io.circe.Json +import io.circe.generic.auto._ +import io.circe.literal._ +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe._ +import io.iohk.armadillo.server.ServerInterpreter +import sttp.client3.HttpError +import sttp.client3.circe._ +import sttp.client3.impl.cats.CatsMonadError +import sttp.client3.testing.SttpBackendStub +import sttp.model.StatusCode +import sttp.model.Uri._ +import sttp.tapir.generic.auto._ +import weaver.SimpleIOSuite + +object StubServerInterpreterTest extends SimpleIOSuite { + + case class Greeting(msg: String) + + private val stubInterpreter: ArmadilloStubInterpreter[IO, Json, Nothing] = + ArmadilloStubInterpreter(SttpBackendStub(new CatsMonadError()), new CirceJsonSupport) + + private val testEndpoint = jsonRpcEndpoint(m"hello") + .in(param[String]("name")) + .out[Greeting]("greeting") + .errorOut(errorNoData) + + private val testEndpointWithNonPrimitiveInputs = jsonRpcEndpoint(m"hello") + .in(param[Greeting]("greeting")) + .out[String]("name") + .errorOut(errorNoData) + + test("should return stubbed response from endpoint") { + val stubbedResponse = Greeting("Hello test subject") + val backendStub = stubInterpreter + .whenEndpoint(testEndpoint) + .thenRespond(stubbedResponse) + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[ "kasper" ]""", 1)) + .response(asJson[JsonRpcSuccessResponse[Greeting]]) + ) + responseF.map { r => + expect.same(Right(JsonRpcResponse.v2(stubbedResponse, 1)), r.body) + } + } + + test("should return stubbed error from endpoint") { + val stubbedError = JsonRpcError.noData(399, "Something went wrong") + val backendStub 
= stubInterpreter + .whenEndpoint(testEndpoint) + .thenRespondError(stubbedError) + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[ "kasper" ]""", 1)) + .response(asJsonEither[JsonRpcErrorResponse[JsonRpcError.NoData], JsonRpcSuccessResponse[Greeting]]) + ) + responseF.map { r => + expect.same(Left(HttpError(JsonRpcResponse.error_v2(stubbedError, 1), StatusCode.BadRequest)), r.body) + } + } + + test("should throw error from endpoint") { + val backendStub = stubInterpreter + .whenEndpoint(testEndpoint) + .thenThrowError(new RuntimeException("Something went wrong")) + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[ "kasper" ]""", 1)) + .response(asJsonEither[JsonRpcErrorResponse[JsonRpcError.NoData], JsonRpcSuccessResponse[Greeting]]) + ) + responseF.map { r => + expect.same(Left(HttpError(JsonRpcResponse.error_v2(ServerInterpreter.InternalError, 1), StatusCode.InternalServerError)), r.body) + } + } + + test("should run original logic of the endpoint") { + val serverEndpoint = testEndpoint.serverLogic { name => + IO.pure(Right(Greeting(s"Hello $name")): Either[JsonRpcError.NoData, Greeting]) + } + val backendStub = stubInterpreter + .whenServerEndpoint(serverEndpoint) + .thenRunLogic() + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[ "kasper" ]""", 1)) + .response(asJson[JsonRpcSuccessResponse[Greeting]]) + ) + responseF.map { r => + expect.same(Right(JsonRpcResponse.v2(Greeting("Hello kasper"), 1)), r.body) + } + } + + test("should validate the serialization of input parameters") { + val serverEndpoint = testEndpoint.serverLogic { name => + IO.pure(Right(Greeting(s"Hello $name")): Either[JsonRpcError.NoData, Greeting]) + } + val backendStub = stubInterpreter + .whenServerEndpoint(serverEndpoint) + .assertInputs("kasper") + .thenRunLogic() + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[ "kasper" ]""", 1)) + .response(asJson[JsonRpcSuccessResponse[Greeting]]) + ) + responseF.map { r => + expect.same(Right(JsonRpcResponse.v2(Greeting("Hello kasper"), 1)), r.body) + } + } + + test("should validate the serialization of non-primitive input parameters") { + val serverEndpoint = testEndpointWithNonPrimitiveInputs.serverLogic { _ => + IO.pure(Right("Greeting received !"): Either[JsonRpcError.NoData, String]) + } + val backendStub = stubInterpreter + .whenServerEndpoint(serverEndpoint) + .assertInputs(Greeting("hello kasper")) + .thenRunLogic() + .backend() + + val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[{ "msg": "hello kasper" }]""", 1)) + .response(asJson[JsonRpcSuccessResponse[String]]) + ) + responseF.map { r => + expect.same(Right(JsonRpcResponse.v2("Greeting received !", 1)), r.body) + } + } + + test("should throw error when the check on input parameters fails") { + val serverEndpoint = testEndpointWithNonPrimitiveInputs.serverLogic { _ => + IO.pure(Right("Greeting received !"): Either[JsonRpcError.NoData, String]) + } + val backendStub = stubInterpreter + .whenServerEndpoint(serverEndpoint) + .assertInputs(Greeting("kasper hello")) + .thenRunLogic() + .backend() + + 
val responseF = backendStub.send( + sttp.client3.basicRequest + .post(uri"http://localhost:7654") + .body(JsonRpcRequest.v2("hello", json"""[{ "msg": "hello kasper" }]""", 1)) + .response(asJsonEither[JsonRpcErrorResponse[JsonRpcError.NoData], JsonRpcSuccessResponse[String]]) + ) + responseF.map { r => + expect.same(Left(HttpError(JsonRpcResponse.error_v2(ServerInterpreter.InternalError, 1), StatusCode.InternalServerError)), r.body) + } + } +} diff --git a/server/tapir/src/io/iohk/armadillo/server/tapir/TapirInterpreter.scala b/server/tapir/src/io/iohk/armadillo/server/tapir/TapirInterpreter.scala new file mode 100644 index 0000000..336630a --- /dev/null +++ b/server/tapir/src/io/iohk/armadillo/server/tapir/TapirInterpreter.scala @@ -0,0 +1,98 @@ +package io.iohk.armadillo.server.tapir + +import io.iohk.armadillo._ +import io.iohk.armadillo.server.ServerInterpreter.{InterpretationError, ServerResponse} +import io.iohk.armadillo.server.{CustomInterceptors, Interceptor, JsonSupport, ServerInterpreter} +import sttp.model.StatusCode +import sttp.monad.MonadError +import sttp.monad.syntax._ +import sttp.tapir.Codec.JsonCodec +import sttp.tapir.EndpointIO.Info +import sttp.tapir.SchemaType.SCoproduct +import sttp.tapir.server.ServerEndpoint +import sttp.tapir.server.ServerEndpoint.Full +import sttp.tapir.{CodecFormat, DecodeResult, EndpointIO, RawBodyType, Schema, statusCode} + +import java.nio.charset.StandardCharsets + +class TapirInterpreter[F[_], Raw]( + jsonSupport: JsonSupport[Raw], + interceptors: List[Interceptor[F, Raw]] = CustomInterceptors[F, Raw]().interceptors +)(implicit + monadError: MonadError[F] +) { + + def toTapirEndpoint( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]] + ): Either[InterpretationError, ServerEndpoint.Full[Unit, Unit, String, (Raw, StatusCode), (Option[Raw], StatusCode), Any, F]] = { + ServerInterpreter[F, Raw](jsonRpcEndpoints, jsonSupport, interceptors).map(toTapirEndpoint) + } + + def toTapirEndpointUnsafe( + jsonRpcEndpoints: List[JsonRpcServerEndpoint[F]] + ): ServerEndpoint.Full[Unit, Unit, String, (Raw, StatusCode), (Option[Raw], StatusCode), Any, F] = { + ServerInterpreter[F, Raw](jsonRpcEndpoints, jsonSupport, interceptors).map(toTapirEndpoint) match { + case Left(value) => throw new RuntimeException(value.toString) + case Right(result) => result + } + } + + private def toTapirEndpoint( + serverInterpreter: ServerInterpreter[F, Raw] + ): Full[Unit, Unit, String, (Raw, StatusCode), (Option[Raw], StatusCode), Any, F] = { + sttp.tapir.endpoint.post + .in(EndpointIO.Body(RawBodyType.StringBody(StandardCharsets.UTF_8), idJsonCodec, Info.empty)) + .errorOut(EndpointIO.Body(RawBodyType.StringBody(StandardCharsets.UTF_8), outRawCodec, Info.empty).and(statusCode)) + .out(EndpointIO.Body(RawBodyType.StringBody(StandardCharsets.UTF_8), outOptionRawCodec, Info.empty).and(statusCode)) + .serverLogic[F] { input => + serverInterpreter + .dispatchRequest(input) + .map { + case Some(ServerResponse.Success(value)) => Right((Some(value), StatusCode.Ok)) + case Some(ServerResponse.Failure(value)) => Left((value, StatusCode.BadRequest)) + case Some(ServerResponse.ServerFailure(value)) => Left((value, StatusCode.InternalServerError)) + case None => Right((None, StatusCode.NoContent)) + } + } + } + + private val outRawCodec: JsonCodec[Raw] = new JsonCodec[Raw] { + override def rawDecode(l: String): DecodeResult[Raw] = jsonSupport.parse(l).map(jsonSupport.demateralize) + + override def encode(h: Raw): String = jsonSupport.stringify(h) + + override def schema: Schema[Raw] = 
Schema( + SCoproduct(Nil, None)(_ => None), + None + ) + override def format: CodecFormat.Json = CodecFormat.Json() + } + + private val outOptionRawCodec: JsonCodec[Option[Raw]] = new JsonCodec[Option[Raw]] { + override def rawDecode(l: String): DecodeResult[Option[Raw]] = jsonSupport.parse(l).map(json => Some(jsonSupport.demateralize(json))) + + override def encode(h: Option[Raw]): String = h match { + case Some(value) => jsonSupport.stringify(value) + case None => "" // "204 No Content" requires an empty body + } + + override def schema: Schema[Option[Raw]] = Schema( + SCoproduct(Nil, None)(_ => None), + None + ) + override def format: CodecFormat.Json = CodecFormat.Json() + } + + private val idJsonCodec: JsonCodec[String] = new JsonCodec[String] { + override def rawDecode(l: String): DecodeResult[String] = DecodeResult.Value(l) + + override def encode(h: String): String = h + + override def schema: Schema[String] = Schema[String]( + SCoproduct(Nil, None)(_ => None), + None + ) + + override def format: CodecFormat.Json = CodecFormat.Json() + } +} diff --git a/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/BaseSuite.scala b/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/BaseSuite.scala new file mode 100644 index 0000000..857702b --- /dev/null +++ b/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/BaseSuite.scala @@ -0,0 +1,220 @@ +package io.iohk.armadillo.server.tapir.http4s + +import cats.effect.IO +import cats.effect.kernel.Resource +import io.circe.{Encoder, Json, parser} +import io.iohk.armadillo._ +import io.iohk.armadillo.json.circe.CirceJsonSupport +import io.iohk.armadillo.server.ServerInterpreter.{InterpretationError, ServerResponse} +import io.iohk.armadillo.server.tapir.TapirInterpreter +import io.iohk.armadillo.server.{AbstractCirceSuite, CirceEndpoints} +import org.http4s.HttpRoutes +import org.http4s.blaze.server.BlazeServerBuilder +import org.http4s.server.Router +import sttp.client3.asynchttpclient.cats.AsyncHttpClientCatsBackend +import sttp.client3.circe._ +import sttp.client3.{DeserializationException, HttpError, StringBody, SttpBackend, basicRequest} +import sttp.model.{MediaType, StatusCode, Uri} +import sttp.tapir.integ.cats.CatsMonadError +import sttp.tapir.server.ServerEndpoint +import sttp.tapir.server.http4s.{Http4sServerInterpreter, Http4sServerOptions} +import weaver.TestName + +import scala.concurrent.ExecutionContext + +trait BaseSuite extends AbstractCirceSuite[StringBody, ServerEndpoint[Any, IO]] with CirceEndpoints { + + override def invalidJson: StringBody = + StringBody("""{"jsonrpc": "2.0", "method": "foobar, "params": "bar", "baz]""", "utf-8", MediaType.ApplicationJson) + + override def jsonNotAnObject: StringBody = StringBody("""["asd"]""", "utf-8", MediaType.ApplicationJson) + + def testNotification[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName + )( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Json]): Unit = { + test(suffix.copy(endpoint.showDetail + " as notification " + suffix.name)) { + testSingleEndpoint(endpoint)(f) + .use { case (backend, baseUri) => + basicRequest + .post(baseUri) + .body(request) + .send(backend) + .map { response => + expect.same(StatusCode.NoContent, response.code) + } + } + } + } + + def testInvalidRequest[I, E, O](suffix: TestName)(request: StringBody, expectedResponse: JsonRpcResponse[Json]): Unit = { + test(suffix) { + testSingleEndpoint(hello_in_int_out_string)(int => IO.pure(Right(int.toString))) + .use { case (backend, baseUri) => + basicRequest +
.post(baseUri) + .body(StringBody("""{"jsonrpc": "2.0", "method": "foobar, "params": "bar", "baz]""", "utf-8", MediaType.ApplicationJson)) + .response(asJson[JsonRpcResponse[Json]]) + .send(backend) + .map { response => + expect.same( + expectedResponse match { + case success @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(success)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + }, + response.body match { + case Left(error) => + error match { + case HttpError(body, _) => ServerResponse.Failure(parser.parse(body).toOption.get) + case DeserializationException(_, _) => throw new RuntimeException("DeserializationException was not expected") + } + case Right(body) => + body match { + case result @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(result)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + } + } + ) + } + } + } + } + + def test[I, E, O, B: Encoder]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName = "" + )( + f: I => IO[Either[E, O]] + )(request: B, expectedResponse: JsonRpcResponse[Json]): Unit = { + test(suffix.copy(name = endpoint.showDetail + " " + suffix.name)) { + testSingleEndpoint(endpoint)(f) + .use { case (backend, baseUri) => + basicRequest + .post(baseUri) + .body(request) + .response(asJson[JsonRpcResponse[Json]]) + .send(backend) + .map { response => + expect.same( + expectedResponse match { + case success @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(success)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + }, + response.body match { + case Left(error) => + error match { + case HttpError(body, _) => ServerResponse.Failure(parser.parse(body).toOption.get) + case DeserializationException(_, _) => throw new RuntimeException("DeserializationException was not expected") + } + case Right(body) => + body match { + case result @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(result)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + } + } + ) + } + } + } + } + + override def testServerError[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName + )( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Json], expectedResponse: JsonRpcResponse[Json]): Unit = { + test(suffix.copy(name = endpoint.showDetail + " " + suffix.name)) { + testSingleEndpoint(endpoint)(f) + .use { case (backend, baseUri) => + basicRequest + .post(baseUri) + .body(request) + .response(asJson[JsonRpcResponse[Json]]) + .send(backend) + .map { response => + expect.same( + ServerResponse.ServerFailure(jsonSupport.encodeResponse(expectedResponse)), + response.body match { + case Left(error) => + error match { + case HttpError(body, _) => ServerResponse.ServerFailure(parser.parse(body).toOption.get) + case DeserializationException(_, _) => throw new RuntimeException("DeserializationException was not expected") + } + case Right(body) => + body match { + case result @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(result)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + } + } + ) + } + } + } + } + + def testMultiple[B: Encoder](name: TestName)( + se: List[JsonRpcServerEndpoint[IO]] + )(request: List[B], 
expectedResponse: List[JsonRpcResponse[Json]]): Unit = { + test(name) { + testMultipleEndpoints(se) + .use { case (backend, baseUri) => + if (expectedResponse.isEmpty) { + basicRequest + .post(baseUri) + .body(request) + .send(backend) + .map { response => + expect.same(StatusCode.NoContent, response.code) + expect.same(Right(0), response.body.map(_.length)) + } + } else { + basicRequest + .post(baseUri) + .body(request) + .response(asJson[List[JsonRpcResponse[Json]]]) + .send(backend) + .map { response => + expect.same(StatusCode.Ok, response.code) + expect.same(Right(expectedResponse), response.body) + } + } + } + } + } + + def testSingleEndpoint[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O] + )(logic: I => IO[Either[E, O]]): Resource[IO, (SttpBackend[IO, Any], Uri)] = { + testMultipleEndpoints(List(endpoint.serverLogic(logic))) + } + + private def testMultipleEndpoints(se: List[JsonRpcServerEndpoint[IO]]): Resource[IO, (SttpBackend[IO, Any], Uri)] = { + val tapirEndpoints = toInterpreter(se).getOrElse(throw new RuntimeException("Error during conversion to tapir")) + val routes = Http4sServerInterpreter[IO](Http4sServerOptions.default[IO]).toRoutes(tapirEndpoints) + testServer(routes) + } + + override def toInterpreter(se: List[JsonRpcServerEndpoint[IO]]): Either[InterpretationError, ServerEndpoint[Any, IO]] = { + implicit val catsMonadError: CatsMonadError[IO] = new CatsMonadError + val tapirInterpreter = new TapirInterpreter[IO, Json](new CirceJsonSupport) + tapirInterpreter.toTapirEndpoint(se) + } + + private def testServer(routes: HttpRoutes[IO]): Resource[IO, (SttpBackend[IO, Any], Uri)] = { + BlazeServerBuilder[IO] + .withExecutionContext(ExecutionContext.global) + .bindHttp(0, "localhost") + .withHttpApp(Router("/" -> routes).orNotFound) + .resource + .map(_.address.getPort) + .flatMap { port => + AsyncHttpClientCatsBackend.resource[IO]().map { backend => + import sttp.model.Uri._ + (backend, uri"http://localhost:$port") + } + } + } +} diff --git a/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/Http4sServerTest.scala b/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/Http4sServerTest.scala new file mode 100644 index 0000000..35ed201 --- /dev/null +++ b/server/tapir/test/src/io/iohk/armadillo/server/tapir/http4s/Http4sServerTest.scala @@ -0,0 +1,8 @@ +package io.iohk.armadillo.server.tapir.http4s + +import cats.effect.IO +import io.iohk.armadillo.server.AbstractCirceSuite +import sttp.client3.StringBody +import sttp.tapir.server.ServerEndpoint + +object Http4sServerTest extends BaseSuite with AbstractCirceSuite[StringBody, ServerEndpoint[Any, IO]] diff --git a/server/test/src/io/iohk/armadillo/server/AbstractBaseSuite.scala b/server/test/src/io/iohk/armadillo/server/AbstractBaseSuite.scala new file mode 100644 index 0000000..48f6b8e --- /dev/null +++ b/server/test/src/io/iohk/armadillo/server/AbstractBaseSuite.scala @@ -0,0 +1,81 @@ +package io.iohk.armadillo.server + +import cats.effect.IO +import cats.syntax.all._ +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder, Json} +import io.iohk.armadillo.json.circe._ +import io.iohk.armadillo.json.json4s.Json4sSupport +import io.iohk.armadillo.json.json4s.Json4sSupport.JsonRpcIdSerializer +import io.iohk.armadillo.server.Endpoints.{EntitySerializer, IntStringPairSerializer, NoneSerializer, StrictStringSerializer} +import io.iohk.armadillo.server.ServerInterpreter.InterpretationError +import io.iohk.armadillo.{ + JsonRpcEndpoint, + JsonRpcErrorResponse, + 
JsonRpcRequest, + JsonRpcResponse, + JsonRpcServerEndpoint, + JsonRpcSuccessResponse +} +import org.json4s.JsonAST.JValue +import org.json4s.{Formats, NoTypeHints, Serialization} +import weaver.{SimpleIOSuite, TestName} + +trait AbstractCirceSuite[Body, Interpreter] extends AbstractBaseSuite[Json, Body, Interpreter] { + type Enc[T] = Encoder[T] + override lazy val jsonSupport: CirceJsonSupport = new CirceJsonSupport + implicit lazy val jsonRpcResponseDecoder: Decoder[JsonRpcResponse[Json]] = + deriveDecoder[JsonRpcSuccessResponse[Json]].widen.or(deriveDecoder[JsonRpcErrorResponse[Json]].widen) + + implicit lazy val jsonRpcRequestEncoder: Encoder[JsonRpcRequest[Json]] = deriveEncoder[JsonRpcRequest[Json]] + implicit lazy val jsonRpcRequestDecoder: Decoder[JsonRpcRequest[Json]] = deriveDecoder[JsonRpcRequest[Json]] +} +trait AbstractJson4sSuite[Body, Interpreter] extends AbstractBaseSuite[JValue, Body, Interpreter] { + type Enc[T] = Unit + implicit lazy val serialization: Serialization = org.json4s.jackson.Serialization + implicit lazy val formats: Formats = + org.json4s.jackson.Serialization + .formats(NoTypeHints) + JsonRpcIdSerializer + EntitySerializer + IntStringPairSerializer + StrictStringSerializer + NoneSerializer + + override lazy val jsonSupport: Json4sSupport = Json4sSupport(org.json4s.jackson.parseJson(_), org.json4s.jackson.compactJson) +} + +trait AbstractBaseSuite[Raw, Body, Interpreter] extends SimpleIOSuite { + type Enc[T] + + def jsonSupport: JsonSupport[Raw] + + def invalidJson: Body + def jsonNotAnObject: Body + + def testNotification[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName = "" + )( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Raw]): Unit + + def testInvalidRequest[I, E, O]( + suffix: TestName = "" + )(request: Body, expectedResponse: JsonRpcResponse[Raw]): Unit + + def test[I, E, O, B: Enc]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName = "" + )( + f: I => IO[Either[E, O]] + )(request: B, expectedResponse: JsonRpcResponse[Raw]): Unit + + def testServerError[I, E, O]( + endpoint: JsonRpcEndpoint[I, E, O], + suffix: TestName = "" + )( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Raw], expectedResponse: JsonRpcResponse[Raw]): Unit + + def testMultiple[B: Enc](name: TestName)( + se: List[JsonRpcServerEndpoint[IO]] + )(request: List[B], expectedResponse: List[JsonRpcResponse[Raw]]): Unit + + def toInterpreter(se: List[JsonRpcServerEndpoint[IO]]): Either[InterpretationError, Interpreter] +} diff --git a/server/test/src/io/iohk/armadillo/server/AbstractServerSuite.scala b/server/test/src/io/iohk/armadillo/server/AbstractServerSuite.scala new file mode 100644 index 0000000..e68cd65 --- /dev/null +++ b/server/test/src/io/iohk/armadillo/server/AbstractServerSuite.scala @@ -0,0 +1,372 @@ +package io.iohk.armadillo.server + +import cats.effect.IO +import io.circe.Json +import io.circe.literal._ +import io.iohk.armadillo.server.Endpoints._ +import io.iohk.armadillo.server.ServerInterpreter.InterpretationError +import io.iohk.armadillo.{JsonRpcError, JsonRpcRequest, JsonRpcResponse, Notification} + +import java.lang.Integer.parseInt + +trait AbstractServerSuite[Raw, Body, Interpreter] extends AbstractBaseSuite[Raw, Body, Interpreter] with Endpoints { + implicit def circeJsonToRaw(c: Json): Raw + implicit def jsonRpcRequestEncoder: Enc[JsonRpcRequest[Raw]] + implicit def rawEnc: Enc[Raw] + + test(hello_in_int_out_string, "simple call")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", 
json"[42]", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"${"42"}", 1) + ) + + test(hello_in_int_out_string, "invalid params")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"[true]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_in_int_out_string, "too many params")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"[42, 43]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_in_int_out_string_by_name, "expected params by name")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"[42]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_in_int_out_string_by_position, "expected params by pos")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": 42}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_in_int_out_string_validated, "param validation passed")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": 42}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"""${"42"}""", 1) + ) + + test(hello_in_int_out_string_validated, "param by name validation failed")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": -42}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_in_int_out_string_validated, "param by position validation failed")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ -42 ]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + testNotification(hello_in_int_out_string, "notification")(int => IO.pure(Right(int.toString)))( + request = Notification.v2[Raw]("hello", json"[42]") + ) + + test(hello_in_int_out_string, "by_name")(int => IO.pure(Right(int.toString)))( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": 42}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"${"42"}", 1) + ) + + test(hello_in_multiple_int_out_string, "by_position") { case (int1, int2) => IO.pure(Right(s"${int1 + int2}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"[42, 43]", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"${"85"}", 1) + ) + + test(hello_in_multiple_int_out_string, "by_name") { case (int1, int2) => IO.pure(Right(s"${int1 + int2}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": 42, "param2": 43}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"${"85"}", 1) + ) + + test(hello_in_multiple_validated, "validation passed") { case (int1, int2) => IO.pure(Right(s"${int1 + int2}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": -9, "param2": 12}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json"${"3"}", 1) + ) + + test(hello_in_multiple_validated, "first param validation failed") { case (int1, int2) => IO.pure(Right(s"${int1 + int2}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": 42, "param2": 12}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid 
params"}""", 1) + ) + + test(hello_in_multiple_validated, "second param validation failed") { case (int1, int2) => IO.pure(Right(s"${int1 + int2}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1": -9, "param2": 100}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_with_validated_product, "product validation passed") { case (a, b) => IO.pure(Right(s"$a-$b")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ [1, "Bob"] ]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json""""1-Bob"""", 1) + ) + + test(hello_with_validated_product, "product validation failed") { case (a, b) => IO.pure(Right(s"$a-$b")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ [101, "Bob"] ]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_with_validated_coproduct, "coproduct validation passed") { e => IO.pure(Right(s"Hello ${e.id}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ {"Person": {"name": "Bob", "id": 1}} ]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](json""""Hello 1"""", 1) + ) + + test(hello_with_validated_coproduct, "coproduct validation failed") { e => IO.pure(Right(s"Hello ${e.id}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ {"Person": {"name": "Bob", "id": 100}} ]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_with_validated_branch_of_coproduct, "validation fail when passed as vector") { e => IO.pure(Right(s"Hello ${e.id}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""[ {"Person": {"name": "", "id": 100}} ]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(hello_with_validated_branch_of_coproduct, "validation fail when passed as object") { e => IO.pure(Right(s"Hello ${e.id}")) }( + request = JsonRpcRequest.v2[Raw]("hello", json"""{"param1":{"Person": {"name": "", "id": 100}}}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(empty, "empty response")(_ => IO.delay(Right(println("hello from server"))))( + request = JsonRpcRequest.v2[Raw]("empty", json"""[]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.Null, 1) + ) + + test(empty, "method not found")(_ => IO.delay(Right(println("hello from server"))))( + request = JsonRpcRequest.v2[Raw]("non_existing_method", json"""[]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32601, "message": "Method not found"}""", 1) + ) + + test(error_no_data, "no_data_error")(_ => IO.pure(Left(JsonRpcError.noData(123, "error"))))( + request = JsonRpcRequest.v2[Raw]("error_no_data", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": 123, "message": "error"}""", 1) + ) + + test(error_with_data)(_ => IO.pure(Left(JsonRpcError.withData(123, "error", 42))))( + request = JsonRpcRequest.v2[Raw]("error_with_data", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": 123, "message": "error", "data": 42}""", 1) + ) + + test(fixed_error, "fixed_error")(_ => IO.pure(Left(())))( + request = JsonRpcRequest.v2[Raw]("fixed_error", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": 200, "message": "something went wrong"}""", 1) + ) + + test(fixed_error_with_data, 
"fixed_error_with_data")(_ => IO.pure(Left("custom error message")))( + request = JsonRpcRequest.v2[Raw]("fixed_error_with_data", json"[]", 1), + expectedResponse = + JsonRpcResponse.error_v2[Raw](json"""{"code": 200, "message": "something went wrong", "data": "custom error message"}""", 1) + ) + + test(oneOf_fixed_errors_with_data, "oneOf_fixed_errors - small")(_ => IO.pure(Left(ErrorInfoSmall("aaa"))))( + request = JsonRpcRequest.v2[Raw]("fixed_error", json"[]", 1), + expectedResponse = + JsonRpcResponse.error_v2[Raw](json"""{"code": 201, "message": "something went really wrong", "data":{"msg":"aaa"}}""", 1) + ) + + test(oneOf_fixed_errors_with_data, "oneOf_fixed_errors - big")(_ => IO.pure(Left(ErrorInfoBig("aaa", 123))))( + request = JsonRpcRequest.v2[Raw]("fixed_error", json"[]", 1), + expectedResponse = + JsonRpcResponse.error_v2[Raw](json"""{"code": 200, "message": "something went wrong", "data":{"msg":"aaa", "code": 123}}""", 1) + ) + + test(oneOf_fixed_errors_value_matcher, "oneOf_fixed_errors_value_matcher - left")(_ => IO.pure(Left(Left(()))))( + request = JsonRpcRequest.v2[Raw]("fixed_error", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": 201, "message": "something went really wrong"}""", 1) + ) + + test(oneOf_fixed_errors_value_matcher, "oneOf_fixed_errors_value_matcher - right")(_ => IO.pure(Left(Right(()))))( + request = JsonRpcRequest.v2[Raw]("fixed_error", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": 200, "message": "something went wrong"}""", 1) + ) + + testServerError(empty, "internal server error")(_ => IO.raiseError(new RuntimeException("something went wrong")))( + request = JsonRpcRequest.v2[Raw]("empty", json"[]", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32603, "message": "Internal error"}""", 1) + ) + + testMultiple("batch_request_successful")( + List( + hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))), + e1_int_string_out_int.serverLogic[IO](str => IO.delay(Right(parseInt(str)))) + ) + )( + request = List[JsonRpcRequest[Raw]]( + JsonRpcRequest.v2[Raw]("hello", json"[11]", "1"), + JsonRpcRequest.v2[Raw]("e1", json"""{"param1": "22"}""", 2) + ), + expectedResponse = List( + JsonRpcResponse.v2[Raw](Json.fromString("11"), 1), + JsonRpcResponse.v2[Raw](Json.fromInt(22), 2) + ) + ) + + testMultiple("batch_request_mixed: success & error & notification")( + List( + hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))), + e1_int_string_out_int.serverLogic[IO](str => IO.delay(Right(parseInt(str)))), + error_with_data.serverLogic[IO](_ => IO.pure(Left(JsonRpcError.withData(123, "error", 123)))) + ) + )( + request = List[JsonRpcRequest[Raw]]( + JsonRpcRequest.v2[Raw]("hello", json"[11]", "1"), + Notification.v2[Raw]("e1", json"""{"param1": "22"}"""), + JsonRpcRequest.v2[Raw]("error_with_data", json"[]", "3") + ), + expectedResponse = List( + JsonRpcResponse.v2[Raw](Json.fromString("11"), 1), + JsonRpcResponse.error_v2[Raw](json"""{"code": 123, "message": "error", "data": 123}""", 3) + ) + ) + + testMultiple("batch_request internal server error")( + List( + hello_in_int_out_string.serverLogic[IO](_ => IO.raiseError(new RuntimeException("something went wrong"))), + e1_int_string_out_int.serverLogic[IO](_ => IO.raiseError(new RuntimeException("something went wrong"))), + error_with_data.serverLogic[IO](_ => IO.raiseError(new RuntimeException("something went wrong"))) + ) + )( + request = List[JsonRpcRequest[Raw]]( + 
JsonRpcRequest.v2[Raw]("hello", json"[11]", "1"), + Notification.v2[Raw]("e1", json"""{"param1": "22"}"""), + JsonRpcRequest.v2[Raw]("error_with_data", json"[]", "3") + ), + expectedResponse = List( + JsonRpcResponse.error_v2[Raw](json"""{"code": -32603, "message": "Internal error"}""", 1), + JsonRpcResponse.error_v2[Raw](json"""{"code": -32603, "message": "Internal error"}""", 3) + ) + ) + + testMultiple("batch_request method_not_found")(List.empty)( + request = List[JsonRpcRequest[Raw]]( + JsonRpcRequest.v2[Raw]("non_existing_method_1", json"[11]", "1"), + Notification.v2[Raw]("non_existing_method_2", json"""{"param1": "22"}"""), + JsonRpcRequest.v2[Raw]("non_existing_method_3", json"[11]", "3") + ), + expectedResponse = List( + JsonRpcResponse.error_v2[Raw](json"""{"code": -32601, "message": "Method not found"}""", 1), + JsonRpcResponse.error_v2[Raw](json"""{"code": -32601, "message": "Method not found"}""", 3) + ) + ) + + testMultiple("batch_request invalid request")(List.empty)( + request = List[Raw]( + json"""{"jsonrpc": "2.0", "method": 1, "params": "bar"}""", + json"""{"jsonrpc": "2.0", "method": 1, "params": "bar"}""" + ), + expectedResponse = List( + JsonRpcResponse.error_v2[Raw](json"""{"code": -32600, "message": "Invalid Request"}"""), + JsonRpcResponse.error_v2[Raw](json"""{"code": -32600, "message": "Invalid Request"}""") + ) + ) + + testMultiple("batch_request only notifications")( + List( + hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))), + e1_int_string_out_int.serverLogic[IO](str => IO.delay(Right(parseInt(str)))) + ) + )( + request = List[JsonRpcRequest[Raw]]( + Notification.v2[Raw]("hello", json"[11]"), + Notification.v2[Raw]("e1", json"""{"param1": "22"}""") + ), + expectedResponse = List.empty + ) + + test("should return error when trying to pass non-unique methods to tapir interpreter") { + val se = hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))) + val result = toInterpreter(List(se, se)) + IO.delay(expect.same(result, Left(InterpretationError.NonUniqueMethod(List(hello_in_int_out_string.methodName))))) + } + + test(hello_in_int_out_string, "invalid request")(int => IO.pure(Right(int.toString)))( + request = json"""{"jsonrpc": "2.0", "method": 1, "params": "bar"}""": Raw, + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32600, "message": "Invalid Request"}""") + ) + + test(hello_in_int_out_string, "invalid request structure")(int => IO.pure(Right(int.toString)))( + request = json"""123""": Raw, + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32600, "message": "Invalid Request"}""") + ) + + testInvalidRequest("parse error - invalid json")( + request = invalidJson, + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32700, "message": "Parse error"}""") + ) + + testInvalidRequest("parse error - root is not an object")( + request = jsonNotAnObject, + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32700, "message": "Parse error"}""") + ) + + test(optional_input, "all inputs provided using positional style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""["alice", 1]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("1alice"), 1) + ) + + test(optional_input_last, "all inputs provided using positional style (optional param last)") { case (s, i) => + IO.pure(Right(s"$s${i.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input_last", 
json"""["alice", 1]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("alice1"), 1) + ) + + test(optional_input, "all inputs provided using by-name style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""{"p2": 1, "p1": "alice"}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("1alice"), 1) + ) + + test(optional_input, "optional input omitted when using positional style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""[1]""", 1), + expectedResponse = JsonRpcResponse.v2(Json.fromString("1"), 1) + ) + + test(optional_input_last, "optional input omitted when using positional style (optional param last)") { case (s, i) => + IO.pure(Right(s"$s${i.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input_last", json"""["alice"]""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("alice"), 1) + ) + + test(optional_input, "optional input omitted when using by-name style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""{"p2": 1}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("1"), 1) + ) + + test(optional_input, "should fail when mandatory input omitted when using by-name style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""{"p1": "alice"}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(optional_input, "should fail when mandatory input omitted when using by-pos style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""["alice"]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(optional_input, "should fail when given more parameters than expected - positional style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""["alice", 1, 2]""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(optional_input, "should fail when given more parameters than expected - by-name style") { case (s, i) => + IO.pure(Right(s"$i${s.getOrElse("")}")) + }( + request = JsonRpcRequest.v2[Raw]("optional_input", json"""{"p1": "alice", "p2": 1, "p3": 2}""", 1), + expectedResponse = JsonRpcResponse.error_v2[Raw](json"""{"code": -32602, "message": "Invalid params"}""", 1) + ) + + test(optional_output, "should return optional response")(_ => IO.pure(Right(Option.empty[String])))( + request = JsonRpcRequest.v2[Raw]("optional_output", json"""{}""", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.Null, 1) + ) + + test(output_without_params, "should return response when no params attribute is missing")(_ => IO.pure(Right("params is not required")))( + request = JsonRpcRequest.v2[Raw]("output_without_params", 1), + expectedResponse = JsonRpcResponse.v2[Raw](Json.fromString("params is not required"), 1) + ) +} diff --git a/server/test/src/io/iohk/armadillo/server/Endpoints.scala b/server/test/src/io/iohk/armadillo/server/Endpoints.scala new file mode 100644 index 0000000..31eeeb0 --- /dev/null +++ 
b/server/test/src/io/iohk/armadillo/server/Endpoints.scala @@ -0,0 +1,183 @@ +package io.iohk.armadillo.server + +import io.circe.generic.auto._ +import io.iohk.armadillo._ +import io.iohk.armadillo.json.json4s.Json4sSupport +import io.iohk.armadillo.server.Endpoints._ +import org.json4s.{CustomSerializer, Extraction, Formats, JArray, JField, JInt, JNothing, JNull, JObject, JString, JValue, Serialization} +import sttp.tapir.generic.auto._ +import sttp.tapir.{Schema, ValidationResult, Validator} +trait CirceEndpoints extends Endpoints { + import io.iohk.armadillo.json.circe + + override implicit def intCodec: JsonRpcCodec[Int] = circe.jsonRpcCodec + override implicit def stringCodec: JsonRpcCodec[String] = circe.jsonRpcCodec + override implicit def stringIntCodec: JsonRpcCodec[(String, Int)] = circe.jsonRpcCodec + override implicit def intStringCodec: JsonRpcCodec[(Int, String)] = circe.jsonRpcCodec + override implicit def entityCodec(implicit schema: Schema[Entity]): JsonRpcCodec[Entity] = circe.jsonRpcCodec + override implicit def intErrorCodec: JsonRpcCodec[JsonRpcError[Int]] = circe.jsonRpcCodec + override implicit def smallCodec: JsonRpcCodec[ErrorInfoSmall] = circe.jsonRpcCodec + override implicit def bigCodec: JsonRpcCodec[ErrorInfoBig] = circe.jsonRpcCodec + override implicit def optionStringCodec: JsonRpcCodec[Option[String]] = circe.jsonRpcCodec + override implicit def optionIntCodec: JsonRpcCodec[Option[Int]] = circe.jsonRpcCodec +} + +trait Json4sEndpoints extends Endpoints { + import io.iohk.armadillo.json.json4s + + implicit val serialization: Serialization + implicit val formats: Formats + implicit val jsonSupport: Json4sSupport + + override implicit def intCodec: JsonRpcCodec[Int] = json4s.jsonRpcCodec + override implicit def stringCodec: JsonRpcCodec[String] = json4s.jsonRpcCodec + override implicit def stringIntCodec: JsonRpcCodec[(String, Int)] = json4s.jsonRpcCodec + override implicit def intStringCodec: JsonRpcCodec[(Int, String)] = json4s.jsonRpcCodec + override implicit def entityCodec(implicit schema: Schema[Entity]): JsonRpcCodec[Entity] = json4s.jsonRpcCodec + override implicit def intErrorCodec: JsonRpcCodec[JsonRpcError[Int]] = json4s.jsonRpcCodec + override implicit def smallCodec: JsonRpcCodec[ErrorInfoSmall] = json4s.jsonRpcCodec + override implicit def bigCodec: JsonRpcCodec[ErrorInfoBig] = json4s.jsonRpcCodec + override implicit def optionStringCodec: JsonRpcCodec[Option[String]] = json4s.jsonRpcCodec + override implicit def optionIntCodec: JsonRpcCodec[Option[Int]] = json4s.jsonRpcCodec +} + +trait Endpoints { + implicit def intCodec: JsonRpcCodec[Int] + implicit def stringCodec: JsonRpcCodec[String] + implicit def stringIntCodec: JsonRpcCodec[(String, Int)] + implicit def intStringCodec: JsonRpcCodec[(Int, String)] + implicit def entityCodec(implicit schema: Schema[Entity]): JsonRpcCodec[Entity] + implicit def intErrorCodec: JsonRpcCodec[JsonRpcError[Int]] + implicit def smallCodec: JsonRpcCodec[ErrorInfoSmall] + implicit def bigCodec: JsonRpcCodec[ErrorInfoBig] + implicit def optionStringCodec: JsonRpcCodec[Option[String]] + implicit def optionIntCodec: JsonRpcCodec[Option[Int]] + + val hello_in_int_out_string: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1")) + .out[String]("response") + + val hello_in_int_out_string_by_name: JsonRpcEndpoint[Int, Unit, String] = jsonRpcEndpoint(m"hello", ParamStructure.ByName) + .in(param[Int]("param1")) + .out[String]("response") + + val hello_in_int_out_string_by_position: 
JsonRpcEndpoint[Int, Unit, String] = + jsonRpcEndpoint(m"hello", ParamStructure.ByPosition) + .in(param[Int]("param1")) + .out[String]("response") + + val hello_in_int_out_string_validated: JsonRpcEndpoint[Int, Unit, String] = + jsonRpcEndpoint(m"hello", ParamStructure.Either) + .in(param[Int]("param1").validate(Validator.min(0))) + .out[String]("response") + + val hello_in_multiple_int_out_string: JsonRpcEndpoint[(Int, Int), Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Int]("param1").and(param[Int]("param2"))) + .out[String]("response") + + val hello_in_multiple_validated: JsonRpcEndpoint[(Int, Int), Unit, String] = jsonRpcEndpoint(m"hello") + .in( + param[Int]("param1") + .validate(Validator.negative[Int].and(Validator.min(-10))) + .and( + param[Int]("param2") + .validate(Validator.inRange(10, 20)) + ) + ) + .out[String]("response") + + val hello_with_validated_product: JsonRpcEndpoint[(Int, String), Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[(Int, String)]("param1").validate(Validator.max(10).contramap(_._1))) + .out[String]("response") + + val hello_with_validated_coproduct: JsonRpcEndpoint[Entity, Unit, String] = jsonRpcEndpoint(m"hello") + .in(param[Entity]("param1").validate(Validator.max(10).contramap(_.id))) + .out[String]("response") + + val hello_with_validated_branch_of_coproduct: JsonRpcEndpoint[Entity, Unit, String] = { + implicit val schemaForPerson: Schema[Person] = + Schema.derived[Person].validate(Validator.custom(p => ValidationResult.validWhen(p.name.nonEmpty))) + jsonRpcEndpoint(m"hello") + .in(param[Entity]("param1")) + .out[String]("response") + } + val empty: JsonRpcEndpoint[Unit, Unit, Unit] = jsonRpcEndpoint(m"empty") + + val error_no_data: JsonRpcEndpoint[Unit, JsonRpcError.NoData, Unit] = jsonRpcEndpoint(m"error_no_data") + .errorOut(errorNoData) + + val error_with_data: JsonRpcEndpoint[Unit, JsonRpcError[Int], Unit] = jsonRpcEndpoint(m"error_with_data") + .errorOut(errorWithData[Int]) + + val fixed_error: JsonRpcEndpoint[Unit, Unit, Unit] = jsonRpcEndpoint(m"fixed_error") + .errorOut(fixedError(200, "something went wrong")) + + val fixed_error_with_data: JsonRpcEndpoint[Unit, String, Unit] = jsonRpcEndpoint(m"fixed_error_with_data") + .errorOut(fixedErrorWithData[String](200, "something went wrong")) + + val oneOf_fixed_errors_with_data: JsonRpcEndpoint[Unit, ErrorInfo, Unit] = jsonRpcEndpoint(m"fixed_error") + .errorOut( + oneOf[ErrorInfo]( + oneOfVariant(fixedErrorWithData[ErrorInfoSmall](201, "something went really wrong")), + oneOfVariant(fixedErrorWithData[ErrorInfoBig](200, "something went wrong")) + ) + ) + val oneOf_fixed_errors_value_matcher: JsonRpcEndpoint[Unit, Either[Unit, Unit], Unit] = jsonRpcEndpoint(m"fixed_error") + .errorOut( + oneOf( + oneOfVariantValueMatcher[Either[Unit, Unit]](fixedError(201, "something went really wrong")) { case Left(_) => true }, + oneOfVariantValueMatcher[Either[Unit, Unit]](fixedError(200, "something went wrong")) { case Right(_) => true } + ) + ) + + val e1_int_string_out_int: JsonRpcEndpoint[String, Unit, Int] = jsonRpcEndpoint(m"e1") + .in(param[String]("param1")) + .out[Int]("response") + + val optional_input: JsonRpcEndpoint[(Option[String], Int), Unit, String] = jsonRpcEndpoint(m"optional_input") + .in( + param[Option[String]]("p1").and(param[Int]("p2")) + ) + .out[String]("response") + + val optional_input_last: JsonRpcEndpoint[(String, Option[Int]), Unit, String] = jsonRpcEndpoint(m"optional_input_last") + .in( + param[String]("p1").and(param[Option[Int]]("p2")) + ) + 
.out[String]("response") + + val optional_output: JsonRpcEndpoint[Unit, Unit, Option[String]] = jsonRpcEndpoint(m"optional_output") + .out[Option[String]]("response") + + val output_without_params: JsonRpcEndpoint[Unit, Unit, String] = jsonRpcEndpoint(m"output_without_params") + .out[String]("response") +} + +object Endpoints { + + sealed trait ErrorInfo + case class ErrorInfoSmall(msg: String) extends ErrorInfo + case class ErrorInfoBig(msg: String, code: Int) extends ErrorInfo + + sealed trait Entity { + def id: Int + } + final case class Person(name: String, id: Int) extends Entity + + object EntitySerializer + extends CustomSerializer[Entity](implicit formats => + ( + Function.unlift((_: JValue).extractOpt[Person]).compose { case JObject(JField("Person", v) :: Nil) => v }, + { case person: Person => JObject("Person" -> Extraction.decompose(person)) } + ) + ) + + object IntStringPairSerializer + extends CustomSerializer[(Int, String)](_ => + ({ case JArray(JInt(int) :: JString(str) :: Nil) => (int.toInt, str) }, PartialFunction.empty) + ) + + object StrictStringSerializer extends CustomSerializer[String](_ => ({ case JString(str) => str }, { case str: String => JString(str) })) + + object NoneSerializer extends CustomSerializer[None.type](_ => ({ case JNothing | JNull => None }, { case None => JNull })) +} diff --git a/server/test/src/io/iohk/armadillo/server/OverrideInterceptorTest.scala b/server/test/src/io/iohk/armadillo/server/OverrideInterceptorTest.scala new file mode 100644 index 0000000..fb9f0af --- /dev/null +++ b/server/test/src/io/iohk/armadillo/server/OverrideInterceptorTest.scala @@ -0,0 +1,167 @@ +package io.iohk.armadillo.server + +import cats.effect.{IO, Ref} +import cats.syntax.all._ +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.literal._ +import io.circe.{Decoder, Encoder, Json} +import io.iohk.armadillo.json.circe.{CirceJsonSupport, _} +import io.iohk.armadillo.server.ServerInterpreter.ServerResponse +import io.iohk.armadillo.server._ +import io.iohk.armadillo.{JsonRpcErrorResponse, JsonRpcRequest, JsonRpcResponse, JsonRpcSuccessResponse} +import sttp.tapir.integ.cats.CatsMonadError +import weaver.SimpleIOSuite + +object OverrideInterceptorTest extends SimpleIOSuite with CirceEndpoints { + implicit lazy val jsonRpcResponseDecoder: Decoder[JsonRpcResponse[Json]] = + deriveDecoder[JsonRpcSuccessResponse[Json]].widen.or(deriveDecoder[JsonRpcErrorResponse[Json]].widen) + + implicit lazy val jsonRpcRequestEncoder: Encoder[JsonRpcRequest[Json]] = deriveEncoder[JsonRpcRequest[Json]] + implicit lazy val jsonRpcRequestDecoder: Decoder[JsonRpcRequest[Json]] = deriveDecoder[JsonRpcRequest[Json]] + implicit val monadError: CatsMonadError[IO] = new CatsMonadError[IO] + private val jsonSupport = new CirceJsonSupport + + test("should return method not found when calling non-existing overridden endpoint") { + val interpreter = ServerInterpreter.applyUnsafe(List.empty, jsonSupport, CustomInterceptors[IO, Json]().interceptors) + + interpreter + .dispatchRequest( + jsonSupport.stringify(Encoder[JsonRpcRequest[Json]].apply(JsonRpcRequest.v2[Json]("hello", json"[42]", 1))) + ) + .map { + case Some(ServerResponse.Failure(json)) => + expect(json == json"""{ + "jsonrpc" : "2.0", + "error" : ${ServerInterpreter.MethodNotFound}, + "id" : 1 + }""") + case _ => failure("Expected server fail response") + } + } + + test("should not call the original endpoint and return overridden response") { + val ref = Ref.unsafe(0) + val originalEndpoint = 
hello_in_int_out_string.serverLogic { i => + ref.update(_ + 1) >> IO.pure(s"greetings $i".asRight[Unit]) + } + val overridingEndpoint = originalEndpoint.`override`.thenReturn(IO.pure("stubbed response".asRight[Unit])) + val interpreter = ServerInterpreter.applyUnsafe( + List(originalEndpoint), + jsonSupport, + CustomInterceptors[IO, Json](overriddenEndpoints = List(overridingEndpoint)).interceptors + ) + + interpreter + .dispatchRequest( + jsonSupport.stringify(Encoder[JsonRpcRequest[Json]].apply(JsonRpcRequest.v2[Json]("hello", json"[42]", 1))) + ) + .product(ref.get) + .map { + case (Some(ServerResponse.Success(json)), counter) => + expect(json == json"""{ + "jsonrpc" : "2.0", + "result" : "stubbed response", + "id" : 1 + } + """).and(expect(counter == 0)) + case _ => failure("Expected server success response") + } + } + + test("should call override before the original logic") { + val ref = Ref.unsafe(0) + val originalEndpoint = hello_in_int_out_string.serverLogic { i => + ref.update(_ + 2) >> IO.pure(s"greetings $i".asRight[Unit]) + } + val overridingEndpoint = originalEndpoint.`override`.runBeforeLogic(ref.update(_ * 2)) + val interpreter = ServerInterpreter.applyUnsafe( + List(originalEndpoint), + jsonSupport, + CustomInterceptors[IO, Json](overriddenEndpoints = List(overridingEndpoint)).interceptors + ) + + interpreter + .dispatchRequest( + jsonSupport.stringify(Encoder[JsonRpcRequest[Json]].apply(JsonRpcRequest.v2[Json]("hello", json"[42]", 1))) + ) + .product(ref.get) + .map { + case (Some(ServerResponse.Success(json)), counter) => + expect(json == json"""{ + "jsonrpc" : "2.0", + "result" : "greetings 42", + "id" : 1 + } + """).and(expect(counter == 2)) + case _ => failure("Expected server success response") + } + } + + test("should call override after the original logic") { + val ref = Ref.unsafe(0) + val originalEndpoint = hello_in_int_out_string.serverLogic { i => + ref.update(_ + 2) >> IO.pure(s"greetings $i".asRight[Unit]) + } + val overridingEndpoint = originalEndpoint.`override`.runAfterLogic(ref.update(_ * 2)) + val interpreter = ServerInterpreter.applyUnsafe( + List(originalEndpoint), + jsonSupport, + CustomInterceptors[IO, Json](overriddenEndpoints = List(overridingEndpoint)).interceptors + ) + + interpreter + .dispatchRequest( + jsonSupport.stringify(Encoder[JsonRpcRequest[Json]].apply(JsonRpcRequest.v2[Json]("hello", json"[42]", 1))) + ) + .product(ref.get) + .map { + case (Some(ServerResponse.Success(json)), counter) => + expect( + json == + json"""{ + "jsonrpc" : "2.0", + "result" : "greetings 42", + "id" : 1 + } + """ + ).and(expect(counter == 4)) + case _ => failure("Expected server success response") + } + } + + test("should replace the logic with the logic of another endpoint") { + val ref = Ref.unsafe(0) + val originalEndpoint = hello_in_int_out_string.serverLogic { i => + ref.update(_ + 2) >> IO.pure(s"greetings $i".asRight[Unit]) + } + val anotherEndpoint = hello_in_int_out_string.serverLogic { i => + ref.update(_ + 3) >> IO.pure(s"greetings $i".asRight[Unit]) + } + + val overridingEndpoint = originalEndpoint.`override`.replaceLogic(anotherEndpoint.logic) + val interpreter = ServerInterpreter.applyUnsafe( + List(originalEndpoint), + jsonSupport, + CustomInterceptors[IO, Json](overriddenEndpoints = List(overridingEndpoint)).interceptors + ) + + interpreter + .dispatchRequest( + jsonSupport.stringify(Encoder[JsonRpcRequest[Json]].apply(JsonRpcRequest.v2[Json]("hello", json"[42]", 1))) + ) + .product(ref.get) + .map { + case (Some(ServerResponse.Success(json)), 
counter) => + expect( + json == + json"""{ + "jsonrpc" : "2.0", + "result" : "greetings 42", + "id" : 1 + } + """ + ).and(expect(counter == 3)) + case _ => failure("Expected server success response") + } + } +} diff --git a/server/test/src/io/iohk/armadillo/server/ServerInterpreterTest.scala b/server/test/src/io/iohk/armadillo/server/ServerInterpreterTest.scala new file mode 100644 index 0000000..e7e377a --- /dev/null +++ b/server/test/src/io/iohk/armadillo/server/ServerInterpreterTest.scala @@ -0,0 +1,129 @@ +package io.iohk.armadillo.server + +import cats.effect.IO +import io.circe.{Encoder, Json} +import io.iohk.armadillo._ +import io.iohk.armadillo.server.ServerInterpreter.ServerResponse +import org.json4s.{Extraction, JValue} +import sttp.tapir.integ.cats.CatsMonadError +import weaver.TestName + +object CirceServerInterpreterTest + extends ServerInterpreterTest[Json] + with AbstractCirceSuite[String, ServerInterpreter[IO, Json]] + with CirceEndpoints { + + override def encode[B: Encoder](b: B): Json = Encoder[B].apply(b) + + override def circeJsonToRaw(c: Json): Json = c + + override def rawEnc: Encoder[Json] = implicitly + +} + +object Json4sServerInterpreterTest + extends ServerInterpreterTest[JValue] + with AbstractJson4sSuite[String, ServerInterpreter[IO, JValue]] + with Json4sEndpoints { + + override def encode[B: Enc](b: B): JValue = Extraction.decompose(b) + + override def circeJsonToRaw(c: Json): JValue = org.json4s.jackson.parseJson(c.noSpaces) + + override def rawEnc: Enc[JValue] = () + + override implicit def jsonRpcRequestEncoder: Unit = () +} + +trait ServerInterpreterTest[Raw] + extends AbstractServerSuite[Raw, String, ServerInterpreter[IO, Raw]] + with AbstractBaseSuite[Raw, String, ServerInterpreter[IO, Raw]] + with Endpoints { + + override def invalidJson: String = """{"jsonrpc": "2.0", "method": "foobar, "params": "bar", "baz]""" + + override def jsonNotAnObject: String = """["asd"]""" + + protected def createInterpreter(se: List[JsonRpcServerEndpoint[IO]]): ServerInterpreter[IO, Raw] = { + toInterpreter(se).getOrElse(throw new RuntimeException("cannot create interpreter")) + } + override def toInterpreter( + se: List[JsonRpcServerEndpoint[IO]] + ): Either[ServerInterpreter.InterpretationError, ServerInterpreter[IO, Raw]] = { + ServerInterpreter(se, jsonSupport, CustomInterceptors().interceptors)(new CatsMonadError[IO]) + } + + def encode[B: Enc](b: B): Raw + + override def testNotification[I, E, O](endpoint: JsonRpcEndpoint[I, E, O], suffix: TestName)( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Raw]): Unit = { + test(suffix.copy(name = endpoint.showDetail + " as notification " + suffix.name)) { + val interpreter = createInterpreter(List(endpoint.serverLogic(f))) + val strRequest = jsonSupport.stringify(encode(request)) + interpreter.dispatchRequest(strRequest).map { response => + expect.same(Option.empty, response) + } + } + } + + override def testInvalidRequest[I, E, O](name: TestName)(request: String, expectedResponse: JsonRpcResponse[Raw]): Unit = { + test(name) { + val interpreter = createInterpreter(List(hello_in_int_out_string.serverLogic[IO](int => IO.pure(Right(int.toString))))) + interpreter.dispatchRequest(request).map { response => + val expectedServerResponse = expectedResponse match { + case success @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(success)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + } + 
expect.same(Some(expectedServerResponse), response) + } + } + } + + override def test[I, E, O, B: Enc](endpoint: JsonRpcEndpoint[I, E, O], suffix: TestName)( + f: I => IO[Either[E, O]] + )(request: B, expectedResponse: JsonRpcResponse[Raw]): Unit = { + test(suffix.copy(name = endpoint.showDetail + " " + suffix.name)) { + val interpreter = createInterpreter(List(endpoint.serverLogic(f))) + val strRequest = jsonSupport.stringify(encode(request)) + interpreter.dispatchRequest(strRequest).map { response => + val expectedServerResponse = expectedResponse match { + case success @ JsonRpcSuccessResponse(_, _, _) => ServerResponse.Success(jsonSupport.encodeResponse(success)) + case error @ JsonRpcErrorResponse(_, _, _) => ServerResponse.Failure(jsonSupport.encodeResponse(error)) + } + expect.same(Some(expectedServerResponse), response) + } + } + } + + override def testServerError[I, E, O](endpoint: JsonRpcEndpoint[I, E, O], suffix: TestName)( + f: I => IO[Either[E, O]] + )(request: JsonRpcRequest[Raw], expectedResponse: JsonRpcResponse[Raw]): Unit = { + test(suffix.copy(name = endpoint.showDetail + " " + suffix.name)) { + val interpreter = createInterpreter(List(endpoint.serverLogic(f))) + val strRequest = jsonSupport.stringify(encode(request)) + interpreter.dispatchRequest(strRequest).map { response => + val expectedServerResponse = ServerResponse.ServerFailure(jsonSupport.encodeResponse(expectedResponse)) + expect.same(Some(expectedServerResponse), response) + } + } + } + + override def testMultiple[B: Enc](name: TestName)( + se: List[JsonRpcServerEndpoint[IO]] + )(request: List[B], expectedResponses: List[JsonRpcResponse[Raw]]): Unit = { + test(name) { + val interpreter = createInterpreter(se) + val strRequest = jsonSupport.stringify(jsonSupport.asArray(request.map(encode[B]))) + interpreter.dispatchRequest(strRequest).map { response => + val expectedServerInterpreterResponse = if (expectedResponses.isEmpty) { + Option.empty + } else { + val json = jsonSupport.asArray(expectedResponses.map(jsonSupport.encodeResponse)) + Some(ServerResponse.Success(json)) + } + expect.same(expectedServerInterpreterResponse, response) + } + } + } +} diff --git a/shell.nix b/shell.nix new file mode 100644 index 0000000..ef42e4e --- /dev/null +++ b/shell.nix @@ -0,0 +1,12 @@ +# For compatibility with non-flake-enabled Nix versions +# ...and nix-env-selector +(import + ( + let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } + ) + { src = ./.; } +).shellNix \ No newline at end of file diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloResourceKleislis.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloResourceKleislis.scala new file mode 100644 index 0000000..32ab93c --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloResourceKleislis.scala @@ -0,0 +1,29 @@ +package io.iohk.armadillo.trace4cats + +import cats.Monad +import cats.data.{EitherT, Kleisli} +import cats.effect.kernel.Resource +import io.janstenpickle.trace4cats.inject.{ResourceKleisli, SpanParams} +import io.janstenpickle.trace4cats.model.{SpanKind, TraceHeaders} +import io.janstenpickle.trace4cats.{ErrorHandler, Span} + +object ArmadilloResourceKleislis { + def fromInput[F[_], I]( + inSpanNamer: ArmadilloInputSpanNamer[I] + )(k: ResourceKleisli[F, SpanParams, Span[F]]): ResourceKleisli[F, I, Span[F]] = + Kleisli { 
input => + k.run((inSpanNamer(input), SpanKind.Server, TraceHeaders.empty, ErrorHandler.empty)) + } + + def fromInputContext[F[_]: Monad, I, E, Ctx]( + makeContext: (I, Span[F]) => F[Either[E, Ctx]], + inSpanNamer: ArmadilloInputSpanNamer[I], + errorToSpanStatus: ArmadilloStatusMapping[E] + )(k: ResourceKleisli[F, SpanParams, Span[F]]): ResourceKleisli[F, I, Either[E, Ctx]] = + fromInput(inSpanNamer)(k).tapWithF { (req, span) => + val fa = EitherT(makeContext(req, span)) + .leftSemiflatTap(e => span.setStatus(errorToSpanStatus(e))) + .value + Resource.eval(fa) + } +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloSpanNamer.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloSpanNamer.scala new file mode 100644 index 0000000..b308510 --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloSpanNamer.scala @@ -0,0 +1,5 @@ +package io.iohk.armadillo.trace4cats + +object ArmadilloSpanNamer { + def methodName[I]: ArmadilloSpanNamer[I] = (ep, _) => ep.methodName.asString +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloStatusMapping.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloStatusMapping.scala new file mode 100644 index 0000000..41ed95d --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/ArmadilloStatusMapping.scala @@ -0,0 +1,7 @@ +package io.iohk.armadillo.trace4cats + +import io.janstenpickle.trace4cats.model.SpanStatus + +object ArmadilloStatusMapping { + def errorStringToInternal[E]: ArmadilloStatusMapping[E] = e => SpanStatus.Internal(e.toString) +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointSyntax.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointSyntax.scala new file mode 100644 index 0000000..c87fc92 --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointSyntax.scala @@ -0,0 +1,90 @@ +package io.iohk.armadillo.trace4cats + +import cats.Monad +import cats.effect.kernel.MonadCancelThrow +import cats.syntax.either._ +import io.iohk.armadillo.JsonRpcServerEndpoint +import io.janstenpickle.trace4cats.Span +import io.janstenpickle.trace4cats.base.context.Provide +import io.janstenpickle.trace4cats.inject.{EntryPoint, ResourceKleisli, Trace} + +trait ServerEndpointSyntax { + implicit class TracedServerEndpoint[I, E, O, F[_], G[_]]( + serverEndpoint: JsonRpcServerEndpoint.Full[I, E, O, G] + ) { + def inject( + entryPoint: EntryPoint[F], + spanNamer: ArmadilloSpanNamer[I] = ArmadilloSpanNamer.methodName[I], + errorToSpanStatus: ArmadilloStatusMapping[E] = ArmadilloStatusMapping.errorStringToInternal + )(implicit + P: Provide[F, G, Span[F]], + F: MonadCancelThrow[F], + G: Monad[G], + T: Trace[G] + ): JsonRpcServerEndpoint.Full[I, E, O, F] = { + val inputSpanNamer = spanNamer(serverEndpoint.endpoint, _) + val context = ArmadilloResourceKleislis + .fromInput[F, I](inputSpanNamer)(entryPoint.toKleisli) + .map(_.asRight[E]) + ServerEndpointTracer.inject( + serverEndpoint, + context, + errorToSpanStatus + ) + } + + def traced( + k: ResourceKleisli[F, I, Span[F]], + errorToSpanStatus: ArmadilloStatusMapping[E] = ArmadilloStatusMapping.errorStringToInternal + )(implicit + P: Provide[F, G, Span[F]], + F: MonadCancelThrow[F], + G: Monad[G], + T: Trace[G] + ): JsonRpcServerEndpoint.Full[I, E, O, F] = + ServerEndpointTracer.inject( + serverEndpoint, + k.map(_.asRight[E]), + errorToSpanStatus + ) + + def injectContext[Ctx]( + entryPoint: EntryPoint[F], + makeContext: (I, Span[F]) => F[Either[E, Ctx]], + spanNamer: ArmadilloSpanNamer[I] = 
ArmadilloSpanNamer.methodName, + errorToSpanStatus: ArmadilloStatusMapping[E] = ArmadilloStatusMapping.errorStringToInternal + )(implicit + P: Provide[F, G, Ctx], + F: MonadCancelThrow[F], + G: Monad[G], + T: Trace[G] + ): JsonRpcServerEndpoint.Full[I, E, O, F] = { + val inputSpanNamer = spanNamer(serverEndpoint.endpoint, _) + val context = ArmadilloResourceKleislis.fromInputContext[F, I, E, Ctx]( + makeContext, + inputSpanNamer, + errorToSpanStatus + )(entryPoint.toKleisli) + ServerEndpointTracer.inject( + serverEndpoint, + context, + errorToSpanStatus + ) + } + + def tracedContext[Ctx]( + k: ResourceKleisli[F, I, Either[E, Ctx]], + errorToSpanStatus: ArmadilloStatusMapping[E] = ArmadilloStatusMapping.errorStringToInternal + )(implicit + P: Provide[F, G, Ctx], + F: MonadCancelThrow[F], + G: Monad[G], + T: Trace[G] + ): JsonRpcServerEndpoint.Full[I, E, O, F] = + ServerEndpointTracer.inject( + serverEndpoint, + k, + errorToSpanStatus + ) + } +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointTracer.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointTracer.scala new file mode 100644 index 0000000..dea625f --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/ServerEndpointTracer.scala @@ -0,0 +1,43 @@ +package io.iohk.armadillo.trace4cats + +import cats.Monad +import cats.data.EitherT +import cats.effect.kernel.MonadCancelThrow +import io.iohk.armadillo.JsonRpcServerEndpoint +import io.janstenpickle.trace4cats.base.context.Provide +import io.janstenpickle.trace4cats.inject.{ResourceKleisli, Trace} +import sttp.tapir.integ.cats.MonadErrorSyntax._ + +object ServerEndpointTracer { + def inject[I, E, O, F[_], G[_], Ctx]( + serverEndpoint: JsonRpcServerEndpoint.Full[I, E, O, G], + k: ResourceKleisli[F, I, Either[E, Ctx]], + errorToSpanStatus: ArmadilloStatusMapping[E] + )(implicit + P: Provide[F, G, Ctx], + F: MonadCancelThrow[F], + G: Monad[G], + T: Trace[G] + ): JsonRpcServerEndpoint.Full[I, E, O, F] = { + JsonRpcServerEndpoint.apply( + endpoint = serverEndpoint.endpoint, + logic = MEF => + input => { + k(input).use { result => + EitherT + .fromEither[F](result) + .flatMap { ctx => + val lower = P.provideK(ctx) + val MEG = MonadErrorImapK(MEF).imapK(P.liftK)(lower) + EitherT { + serverEndpoint.logic(MEG)(input) + }.leftSemiflatTap { err => + Trace[G].setStatus(errorToSpanStatus(err)) + }.mapK(lower) + } + .value + } + } + ) + } +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/package.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/package.scala new file mode 100644 index 0000000..bef7938 --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/package.scala @@ -0,0 +1,10 @@ +package io.iohk.armadillo + +import io.janstenpickle.trace4cats.inject.SpanName +import io.janstenpickle.trace4cats.model.SpanStatus + +package object trace4cats { + type ArmadilloSpanNamer[I] = (JsonRpcEndpoint[I, _, _], I) => SpanName + type ArmadilloInputSpanNamer[I] = I => SpanName + type ArmadilloStatusMapping[E] = E => SpanStatus +} diff --git a/trace4cats/src/io/iohk/armadillo/trace4cats/syntax/package.scala b/trace4cats/src/io/iohk/armadillo/trace4cats/syntax/package.scala new file mode 100644 index 0000000..df2b303 --- /dev/null +++ b/trace4cats/src/io/iohk/armadillo/trace4cats/syntax/package.scala @@ -0,0 +1,3 @@ +package io.iohk.armadillo.trace4cats + +package object syntax extends ServerEndpointSyntax