aito.api

Different APIs that take an Aito Client object as the first argument
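
A minimal usage sketch, assuming the client is constructed as aito.client.AitoClient(instance_url, api_key); verify the exact constructor arguments against your SDK version:

    from aito.client import AitoClient
    import aito.api as aito_api

    # Assumed constructor; adjust the credentials to your own instance.
    client = AitoClient(instance_url="https://your-instance.aito.app", api_key="YOUR_API_KEY")

    # Every function in this module takes the client as its first argument.
    print(aito_api.get_version(client))
    print(aito_api.check_table_exists(client, "products"))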

Functions

check_table_exists(client, table_name)

check if a table exists in the instance

copy_table(client, table_name, copy_table_name)

copy a table

create_column(client, table_name, …)

add or replace a column

create_database(client, schema)

create a database using the specified database schema

create_job(client[, job_endpoint, query, …])

Create a job for a query that takes longer than 30 seconds to run

create_table(client, table_name, schema)

create a table with the specified table name and schema

delete_column(client, table_name, column_name)

delete a column of a table

delete_database(client)

delete the whole database

delete_entries(client, query[, use_job])

Delete the entries according to the criteria given in the query

delete_table(client, table_name)

delete the specified table

download_table(client, table_name, output_folder)

download a table to an NDJSON file or a gzipped NDJSON file

evaluate(client, query[, raise_for_status, …])

send a query to the Evaluate API

generic_query(client, query[, …])

send a query to the Generic Query API

get_column_schema(client, table_name, …)

get the schema of the specified column

get_database_schema(client)

get the schema of the database

get_existing_tables(client)

get a list of existing tables in the instance

get_job_result(client, job_id)

Get the result of a job with the specified job id

get_job_status(client, job_id)

Get the status of a job with the specified job id

get_table_schema(client, table_name)

get the schema of the specified table

get_table_size(client, table_name)

return the number of entries of the specified table

get_version(client)

get the Aito instance version

initiate_upload_file(client, table_name)

Initiate uploading a file to a table

job_request(client[, job_endpoint, query, …])

make a request to an Aito API endpoint using a job

match(client, query[, raise_for_status, use_job])

send a query to the Match API

optimize_table(client, table_name[, use_job])

optimize the specified table after uploading the data

poll_file_processing_status(client, …[, …])

Poll the file processing status until the processing is finished

predict(client, query[, raise_for_status, …])

send a query to the Predict API

query_all_entries(client, table_name[, …])

query all entries of the specified table

query_entries(client, table_name[, offset, …])

query entries of the specified table

quick_add_table(client, input_file[, …])

Create a table and upload a file to the table, using the default inferred schema

quick_predict(client, from_table, …)

generate an example predict query to predict a field

quick_predict_and_evaluate(client, …)

generate an example predict query to predict a field and the corresponding evaluate query

recommend(client, query[, raise_for_status, …])

send a query to the Recommend API

relate(client, query[, raise_for_status, …])

send a query to the Relate API

rename_table(client, old_name, new_name[, …])

rename a table

search(client, query[, raise_for_status, …])

send a query to the Search API

similarity(client, query[, …])

send a query to the Similarity API

trigger_file_processing(client, table_name, …)

Trigger processing of a file that has been uploaded to a table

upload_binary_file(client, table_name, …)

upload a binary file object to a table

upload_binary_file_to_s3(…)

Upload a binary file to AWS S3 using the information from initiate_upload_file()

upload_entries(client, table_name, entries)

populate a table with entries, uploaded in batches of batch_size (see the end-to-end sketch below)

upload_file(client, table_name, file_path[, …])

upload a file to the specified table
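
A hedged end-to-end sketch of how the table and query functions above fit together. The table schema and the predict query follow Aito's JSON query language; the client constructor and passing the schema and query as plain dicts are assumptions to verify against your SDK version:

    from aito.client import AitoClient
    import aito.api as aito_api

    client = AitoClient(instance_url="https://your-instance.aito.app", api_key="YOUR_API_KEY")  # assumed constructor

    # Create a table with an explicit schema (Aito table schema, assumed here as a plain dict).
    schema = {
        "type": "table",
        "columns": {
            "name": {"type": "String"},
            "category": {"type": "String"},
        },
    }
    aito_api.create_table(client, "products", schema)

    # Populate the table; the SDK uploads the entries in batches.
    entries = [
        {"name": "juustoportti viili", "category": "dairy"},
        {"name": "rainbow banaani", "category": "fruit"},
    ]
    aito_api.upload_entries(client, "products", entries)

    # Send a predict query: infer the category of a new product name.
    result = aito_api.predict(
        client,
        {"from": "products", "where": {"name": "banaani"}, "predict": "category"},
    )
    print(result)

    # Clean up.
    aito_api.delete_table(client, "products")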