Added the moderation endpoint and fixed the readme

henceiusegentoo 2023-08-07 11:39:57 +02:00
parent fe5709275e
commit b232fa533c
4 changed files with 61 additions and 8 deletions

View file

@@ -1,6 +1,6 @@
 [package]
 name = "nova-python"
-version = "0.1.0"
+version = "0.1.1"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View file

@@ -2,6 +2,11 @@
 🐍 Python library for accessing the Nova API
 
 ## Usage ##
+Install the module (This requires <a href="https://rustup.rs/">Cargo</a>)
+```sh
+$ pip install nova-python
+```
+
 Import the module
 ```python
 from nova_python import Endpoints, Models, NovaClient
@@ -15,14 +20,17 @@ client = NovaClient("YOUR_API_KEY")
 
 nova_python currently implements two enums: Endpoints and Models. Those contain:
 
-### Endpoints ###
+**Endpoints**
 * `Endpoints.CHAT_COMPLETION`
+* `Endpoints.MODERATION`
 
 **Models**
 * `Models.GPT3`
-+ `Models.GPT4`
+* `Models.GPT4`
+* `Models.MODERATION_LATEST`
+* `Models.MODERATION_STABLE`
 
-Now, to make a request, use the `make_request` function.
+Now, to make a request, use the `make_request` function. For example:
 
 ```python
 from nova_python import Endpoints, Models, NovaClient
@@ -38,7 +46,25 @@ client.make_request(
 )
 ```
+or
+```python
+from nova_python import Endpoints, Models, NovaClient
+
+client = NovaClient("YOUR_API_KEY")
+
+client.make_request(
+    endpoint=Endpoints.MODERATION,
+    model=Models.MODERATION_STABLE,
+    data=[{"input": "I'm going to kill them."}]
+)
+```
 If everything goes to plan, you'll receive a string containing JSON data, which you can then use in your project.
 
 *Happy prompting!*
 
+## FAQ ##
+**Q:** I get an error when installing the package
+
+**A:** Make sure that you have <a href="https://rustup.rs/">Cargo</a> installed
+
 Made with 🩸 by Leander
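
The README states that `make_request` returns a string of JSON data. A minimal sketch of consuming the moderation response from Python, assuming the payload follows an OpenAI-style moderation shape (the `results`/`flagged` field names are an assumption, not taken from this commit):

```python
import json

from nova_python import Endpoints, Models, NovaClient

client = NovaClient("YOUR_API_KEY")

# make_request returns a JSON string, per the README above
raw = client.make_request(
    endpoint=Endpoints.MODERATION,
    model=Models.MODERATION_STABLE,
    data=[{"input": "I'm going to kill them."}]
)

payload = json.loads(raw)

# Field names here are assumed (OpenAI-style moderation response);
# adjust to whatever the API actually returns.
results = payload.get("results") or [{}]
if results[0].get("flagged"):
    print("The input was flagged by the moderation model.")
```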

View file

@@ -5,7 +5,7 @@ build-backend = "maturin"
 
 [project]
 name = "nova-python"
 authors = [
-    {name = "Leander"}
+    {name = "Leander <@henceiusegentoo>"}
 ]
 description = "🐍 Python library for accessing the Nova API"
@@ -16,6 +16,7 @@ classifiers = [
     "Programming Language :: Python :: Implementation :: PyPy",
 ]
+repository = "https://github.com/NovaOSS/nova-python"
 
 [tool.maturin]
 features = ["pyo3/extension-module"]

View file

@@ -10,7 +10,11 @@ enum Models {
     #[pyo3(name = "GPT3")]
     Gpt3,
     #[pyo3(name = "GPT4")]
-    Gpt4
+    Gpt4,
+    #[pyo3(name = "MODERATION_LATEST")]
+    ModerationLatest,
+    #[pyo3(name = "MODERATION_STABLE")]
+    ModerationStable,
 }
 
 #[pyclass(module = "nova_python", frozen)]
@@ -19,6 +23,8 @@ enum Models {
 enum Endpoints {
     #[pyo3(name = "CHAT_COMPLETION")]
     ChatCompletion,
+    #[pyo3(name = "MODERATION")]
+    Moderation,
 }
 
 #[pyclass(module = "nova_python", frozen)]
@@ -84,6 +90,7 @@ impl NovaClient {
     fn get_request_url(&self, endpoint: &Endpoints) -> PyResult<String> {
         match endpoint {
             Endpoints::ChatCompletion => Ok(format!("{}chat/completions", self.url)),
+            Endpoints::Moderation => Ok(format!("{}moderations", self.url)),
             _ => Err(NovaClient::get_invalid_endpoint_error())
         }
     }
@@ -105,6 +112,8 @@ impl NovaClient {
         let model = match model {
             Models::Gpt3 => "gpt-3.5-turbo",
             Models::Gpt4 => "gpt-4",
+            Models::ModerationLatest => "text-moderation-latest",
+            Models::ModerationStable => "text-moderation-stable",
             _ => return Err(NovaClient::get_invalid_model_error())
         };
         request_body.push_str(&format!("\"model\":\"{}\"", model));
@@ -132,7 +141,16 @@ impl NovaClient {
             request_body.push_str("]");
-        } else {
+        }
+        else if endpoint == &Endpoints::Moderation {
+            request_body.push_str(",\"input\":");
+
+            let input = format!("\"{}\"", request_data.get(0).unwrap().get("input").unwrap());
+            request_body.push_str(&input);
+        }
+        else {
             return Err(NovaClient::get_invalid_endpoint_error());
         }
@@ -169,6 +187,14 @@ fn model_is_compatible(endpoint: &Endpoints, model: &Models) -> bool {
             return false;
         }
     }
+    else if endpoint == &Endpoints::Moderation {
+        if [Models::ModerationStable, Models::ModerationLatest].contains(model) {
+            return true;
+        } else {
+            return false;
+        }
+    }
 
     false
 }
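
The extended `model_is_compatible` check means the moderation endpoint only accepts the two moderation models. A minimal sketch of what an incompatible pairing might look like from the Python side, assuming the validation surfaces as a Python exception (the concrete exception type is an assumption, not taken from this commit):

```python
from nova_python import Endpoints, Models, NovaClient

client = NovaClient("YOUR_API_KEY")

try:
    # GPT3 is not a moderation model, so model_is_compatible returns
    # false and the request should be rejected before it is sent.
    client.make_request(
        endpoint=Endpoints.MODERATION,
        model=Models.GPT3,
        data=[{"input": "some text"}]
    )
except Exception as error:  # exact exception type is an assumption
    print(f"Rejected: {error}")
```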