API Docs¶
To integrate Alviss AI into existing systems we provide an API; the documentation is available at https://api.alviss.io/v1/api/docs
Note
To access the API documentation you must first be signed in at https://app.alviss.io/
Authentication¶
To authenticate your requests, you will need to provide an access token. To create the access token, please see Create Access Token
Once you have the access token, you can authenticate your requests by sending the token in the Authorization
header. For example, in the following request, replace YOUR-TOKEN
with your token:
curl --request GET \
--url "https://api.alviss.io/v1/team/teams" \
--header "Authorization: Bearer YOUR-TOKEN" \
--header "Accept: application/json"
Access Token¶
To manage your access tokens, visit https://app.alviss.io/-/user#access-tokens, or click your profile picture in the top right corner of the Alviss AI platform, open the Your Account menu, and select the Access Token tab.
Create Access Token¶
When you click on Create New Token a prompt will be shown with the following fields:
- Name: a name for your token
- Expires at: the expiry time of the token. To keep your tokens secure, consider setting an expiry that matches when your task is complete. A token can expire at most 365 days after it is created.
After submitting the prompt, your new access token will be shown once. Save it somewhere safe; once the prompt is closed you will not be able to see it again.
Revoking Access Token¶
You can revoke a token at any time by pressing the Revoke button in the list of your tokens.
Example data upload¶
Here is an example Python script that:
- uploads a file
- creates a dataset using the file
- activates the dataset
""" Script to upload files and create a dataset in Alviss AI. see https://api.alviss.io/v1/api/docs#/ for the API documentation """ import time import requests url = "https://app.alviss.io/api/v1/api" token = "<set me>" # access token created in https://app.alviss.io/-/user team_id = "<set me>" project_id = "<set me>" file_path = "<set me>" # the file we want to upload headers = {"Authorization": "Bearer " + token} team_project_url = url + f"/projects/{team_id}/{project_id}" # Create a data upload response = requests.post( team_project_url + "/datauploads", headers=headers, params={"dataset_name": "Sales"}, # the file type: [Sales,Events, Media, Extra, .....] files={"file": open(file_path, "rb")}, ) # wait for the dataupload to be completed upload_id = response.json().get("upload_id") while True: response = requests.get( team_project_url + f"/datauploads/{upload_id}", headers=headers, ) if response.json().get("Status") == "complete": break print(response.json()) time.sleep(1) # create the dataset response = requests.post( team_project_url + "/datasets", headers=headers, json={ "upload_ids": [upload_id], # "activate": True, # if one want to activate the dataset right away # "dataset_ids": [8], # used to extend a dataset }, ) dataset_id = response.json().get("IId") # wait for the dataupload to complete while True: response = requests.get( team_project_url + f"/datasets/{dataset_id}", headers=headers, ) if response.json().get("Status") == "complete": break print(response.json()) time.sleep(1) # activate the dataset response = requests.post( team_project_url + f"/datasets/{dataset_id}/activate", headers=headers, )
Upload - Refit - Attribute¶
Here is a basic scenario for continuous updates, where we:
- upload new data
- create a new dataset
- run a model refit
- create an attribution
import time
import requests
url = "https://app.alviss.io/api/v1/api"
token = "<SET ME>"
team_id = "<SET ME>"
project_id = "<SET ME>"
file_path = "<SET ME>"
headers = {"Authorization": "Bearer " + token}
team_project_url = url + f"/projects/{team_id}/{project_id}"
# Model refit with new dataset
existing_model_id = 3  # IId of the model to refit
model_country = "SWE"
model_region = "all"
model_grouping = "all"
response = requests.post(
team_project_url + "/datauploads",
headers=headers,
params={"dataset_name": "Sales"},
files={"file": open(file_path, "rb")},
)
upload_id = response.json().get("upload_id")
while True:
print("Waiting for upload to complete")
response = requests.get(
team_project_url + f"/datauploads/{upload_id}",
headers=headers,
)
if response.json().get("Status") == "complete":
break
print(response.json())
time.sleep(2)
# get the active dataset
response = requests.get(
team_project_url + "/datasets/active",
headers=headers,
)
active_dataset_id = response.json()["IId"]
response = requests.post(
team_project_url + "/datasets",
headers=headers,
json={"upload_ids": [upload_id], "dataset_ids": [active_dataset_id]},
)
dataset_id = response.json().get("IId")
while True:
print("Waiting for dataset to complete")
response = requests.get(
team_project_url + f"/datasets/{dataset_id}",
headers=headers,
)
if response.json().get("Status") == "complete":
break
print(response.json())
time.sleep(2)
response = requests.post(
team_project_url + f"/datasets/{dataset_id}/dates",
headers=headers,
params={"dataset_name": "Sales"},
json=[
{
"country_code": model_country,
"region_code": model_region,
"grouping": model_grouping,
}
],
)
data_dates = response.json()
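# split the available dates: the first 75% for training, the rest for evaluation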
separator_date = int(len(data_dates) * 0.75)
train_dates = data_dates[:separator_date]
eval_dates = data_dates[separator_date:]
response = requests.post(
team_project_url + f"/models/{existing_model_id}/refit",
headers=headers,
json={
"model_detail": {
"dataset_id": dataset_id,
"train_dates": train_dates,
"eval_dates": eval_dates,
},
"model_param": {"epochs": 500, "learning_rate": 0.001, "samples": 3},
},
)
model_id = response.json()["IId"]
while True:
print("Waiting for model to complete")
response = requests.get(
team_project_url + f"/models/{model_id}",
headers=headers,
)
if response.json().get("Status") == "completed":
break
print(response.json())
time.sleep(2)
# run attribution
response = requests.post(
team_project_url + "/attributions",
headers=headers,
json={
"model_id": model_id,
"dataset_id": dataset_id,
"start_date": data_dates[0],
"end_date": data_dates[-1],
"samples": 20,
},
)
attribution_id = response.json()["IId"]
while True:
print("Waiting for attribution to complete")
response = requests.get(
team_project_url + f"/attributions/{attribution_id}",
headers=headers,
)
if response.json().get("Status") == "completed":
break
print(response.json())
time.sleep(2)
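None of the examples on this page check HTTP status codes before reading the JSON body. If you want to fail fast on errors instead of polling an error payload, the requests library provides raise_for_status; here is a sketch, where post_checked is our own helper name rather than anything from the Alviss API:
import requests

def post_checked(url, headers, **kwargs):
    """POST and raise requests.HTTPError on a 4xx/5xx response."""
    response = requests.post(url, headers=headers, **kwargs)
    response.raise_for_status()
    return response.json()
The same wrapper works for requests.get; each POST in the scripts above could go through it before its JSON body is read.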
Extend Active Attribution Set¶
Here is how to extend the active attribution set with a new attribution.
import time
import requests
url = "https://app.alviss.io/api/v1/api"
token = "<SET ME>"
team_id = "<SET ME>"
project_id = "<SET ME>"
headers = {"Authorization": "Bearer " + token}
team_project_url = url + f"/projects/{team_id}/{project_id}"
# get active attribution set
response = requests.get(
team_project_url + "/attributionset/active",
headers=headers,
)
attribution_id = 5  # IId of the attribution to add to the set
attribution_ids = [_attr["IId"] for _attr in response.json()["Attributions"]]
response = requests.post(
team_project_url + "/attributionset_align_dates",
headers=headers,
json={
"attributionjob_ids": [attribution_id, *attribution_ids],
"active": False, # set to True if you want to make this attribution set active
},
)
attribution_set_id = response.json().get("IId")
while True:
print("Waiting for attribution set to complete")
response = requests.get(
team_project_url + f"/attributionset/{attribution_set_id}",
headers=headers,
)
if response.json().get("Status") == "complete":
break
print(response.json())
time.sleep(2)
# manually activate the attribution set (only needed because active=False was passed above)
response = requests.patch(
team_project_url + f"/attributionset/{attribution_set_id}/activate",
headers=headers,
)
Create a new model based on an existing model¶
Here we outline how one can do a “remodel” instead of a “refit”: that is, rather than merely updating an existing model with new data, one creates a new model and trains it from scratch (possibly under different modeling assumptions).
import requests
url = "https://app.alviss.io/api/v1/api"
token = "<SET ME>"
team_id = "<SET ME>"
project_id = "<SET ME>"
headers = {"Authorization": "Bearer " + token}
team_project_url = url + f"/projects/{team_id}/{project_id}"
target_model_id = 11  # IId of the model to base the new model on
response = requests.get(
team_project_url + f"/models/{target_model_id}",
headers=headers,
)
model_info = response.json()
high_level_graph_id = model_info["GraphInstance"]["Graph"]["Id"]
dataset_id = model_info["DataSet"]["IId"]
response = requests.get(
team_project_url + f"/build/graphs/{high_level_graph_id}",
headers=headers,
)
high_level_graph = response.json()
response = requests.post(
team_project_url + f"/datasets/{dataset_id}/dates",
headers=headers,
params={"dataset_name": "Sales"},
json=[
{
"country_code": model_info["Country"],
"region_code": model_info["Region"],
"grouping": model_info["Grouping"],
}
],
)
data_dates = response.json()
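# split the available dates: the first 75% for training, the rest for evaluation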
separator_date = int(len(data_dates) * 0.75)
train_dates = data_dates[:separator_date]
eval_dates = data_dates[separator_date:]
# submit the model using high level graph
response = requests.post(
team_project_url + "/build/model",
headers=headers,
json={
"model_param": {
"epochs": 50,
"learning_rate": 0.01,
"samples": 2,
},
"model_detail": {
"dataset_id": dataset_id,
"train_dates": train_dates,
"eval_dates": eval_dates,
"activate": False, # auto activate
"modelling_combination": {
"country_code": model_info["Country"],
"region_code": model_info["Region"],
"grouping": model_info["Grouping"],
},
},
"graph_detail": {
"graph": {
"nodes": high_level_graph["nodes"],
"edges": high_level_graph["edges"],
}
},
},
)
# build model with low level graph
low_level_graph_id = model_info["GraphInstance"]["Id"]
response = requests.get(
url + f"/graph_instances/{team_id}/{low_level_graph_id}",
headers=headers,
)
low_level_graph = response.json()
response = requests.post(
team_project_url + "/build/model",
headers=headers,
json={
"model_param": {
"epochs": 50,
"learning_rate": 0.01,
"samples": 2,
},
"model_detail": {
"dataset_id": dataset_id,
"train_dates": train_dates,
"eval_dates": eval_dates,
"activate": False, # auto activate
"modelling_combination": {
"country_code": model_info["Country"],
"region_code": model_info["Region"],
"grouping": model_info["Grouping"],
},
},
"graph_detail": {
"graph": {
"nodes": low_level_graph["nodes"],
"edges": low_level_graph["edges"],
}
},
},
)
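The /build/model requests return before training finishes. Assuming the response carries an IId like the refit response does (an assumption on our part; check the API docs), the new model can be polled exactly as in the refit example, appended to the script above:
import time  # not imported in the script above

model_id = response.json()["IId"]  # assumption: /build/model returns an IId like /refit
while True:
    print("Waiting for model to complete")
    response = requests.get(
        team_project_url + f"/models/{model_id}",
        headers=headers,
    )
    if response.json().get("Status") == "completed":
        break
    print(response.json())
    time.sleep(2)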