Compare commits

No commits in common. "1361d09025ee05fef1ffb5ed3186981d99eb4624" and "ccbaa4e247e070fc90323b1d4ba3d477d61147b0" have entirely different histories.

3 changed files with 24 additions and 15 deletions

View file

@@ -1,6 +1,6 @@
 [package]
 name = "nova-python"
-version = "0.1.7"
+version = "0.1.6"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -9,7 +9,7 @@ name = "nova_python"
 crate-type = ["cdylib"]
 
 [dependencies]
-pyo3 = {version = "0.19.2", features = ["generate-import-lib"]}
+pyo3 = {version = "0.19.2", features = ["extension-module", "generate-import-lib"]}
 reqwest = "0.11.18"
 tokio = { version = "1.29.1", features = ["rt-multi-thread", "time"] }
 serde_json = "1.0.104"

View file

@@ -36,7 +36,7 @@ Now, to make a request, use the `make_request` function. For example:
 from nova_python import Endpoints, Models, NovaClient
 
 client = NovaClient("YOUR_API_KEY")
-response = client.make_request(
+reponse = client.make_request(
     endpoint=Endpoints.CHAT_COMPLETION,
     model=Models.GPT3,
     data=[
@@ -54,7 +54,7 @@ or
 from nova_python import Endpoints, Models, NovaClient
 
 client = NovaClient("YOUR_API_KEY")
-response = client.make_request(
+reponse = client.make_request(
     endpoint=Endpoints.MODERATION,
     model=Models.MODERATION_STABLE,
     data=[{"input": "I'm going to kill them."}]
@@ -65,20 +65,20 @@ response = client.make_request(
 
 If everything goes to plan, you'll receive a string containing JSON-Data, which you can then use in your project.
 
 Note, that when using chat completion, as special ChatResponse-Instance get's returned.
-You can access the response's json.data, by casting it to a string using the `str` method, like this:
+You can access the reponse's json.data, by casting it to a string using the `str` method, like this:
 
 ```python
 ...
-str(response)
+str(reponse)
 ...
 ```
 
-but more importantly, you can use it's `get_message_content` function, to directly get access to the chat response. Used like this:
+but more importantly, you can use it's `get_message_content` function, to directly get access to the chat reponse. Used like this:
 
 ```
 ...
-content = response.get_message_content()
+content = reponse.get_message_content()
 ...
 ```
 
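Both sides of this hunk document the same ChatResponse access pattern; only the spelling of the example's variable name differs. For context, here is a consolidated usage sketch based on the README snippets in this diff. The API key is the README's own placeholder, and because the chat `data` payload is truncated in the first hunk above, the message dict below is an assumption, not something taken from the repository:

```python
# Consolidated sketch of the usage documented in the README diff above.
# The message payload shape is an assumption; NovaClient, Endpoints, Models,
# make_request, str(), and get_message_content come from the hunks shown.
from nova_python import Endpoints, Models, NovaClient

client = NovaClient("YOUR_API_KEY")

response = client.make_request(
    endpoint=Endpoints.CHAT_COMPLETION,
    model=Models.GPT3,
    data=[{"role": "user", "content": "Hello!"}],  # payload shape assumed
)

raw_json = str(response)                  # raw JSON data of the ChatResponse
content = response.get_message_content()  # just the chat message content
print(content)
```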

View file

@@ -231,16 +231,25 @@ impl ChatResponse {
 }
 
 fn model_is_compatible(endpoint: &Endpoints, model: &Models) -> bool {
-    let chat_models = [Models::Gpt3, Models::Gpt4];
-    let moderation_models = [Models::ModerationStable, Models::ModerationLatest];
-    match endpoint {
-        Endpoints::ChatCompletion => chat_models.contains(model),
-        Endpoints::Moderation => moderation_models.contains(model),
-        _ => false
-    }
+    if endpoint == &Endpoints::ChatCompletion {
+        if [Models::Gpt3, Models::Gpt4].contains(model) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+    else if endpoint == &Endpoints::Moderation {
+        if [Models::ModerationStable, Models::ModerationLatest].contains(model) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    false
 }
 
 fn key_is_valid(api_key: &str) -> bool {
     if !api_key.starts_with("nv-") {
         return false;