With S3
For detailed instructions on setting up your S3 bucket, refer to the Bucket Management documentation.
We can now start issuing problems to the TitanQ Solver. Below are some examples, with expected results, that can be run locally from your favorite IDE or Python environment.
Note that the following Python modules need to be installed (e.g. via `pip install boto3 numpy requests`):
- boto3 (for AWS access)
- numpy (for data generation and exchange)
- requests (for calling the TitanQ API)
Generate a weights matrix and a bias vector
Here are some example input weights matrices and bias vectors:
- N10
- N150
- K2000
import numpy as np

# Example 10-variable (N10) problem: a symmetric weights matrix
# with a zero diagonal, and an all-zero bias vector
weights = np.array([[ 0, -1, -1, -1,  1, -1,  1, -1, -1,  1],
                    [-1,  0,  1, -1, -1,  1, -1, -1, -1,  1],
                    [-1,  1,  0, -1,  1, -1,  1, -1, -1, -1],
                    [-1, -1, -1,  0, -1,  1,  1, -1,  1, -1],
                    [ 1, -1,  1, -1,  0, -1, -1,  1, -1,  1],
                    [-1,  1, -1,  1, -1,  0, -1, -1,  1,  1],
                    [ 1, -1,  1,  1, -1, -1,  0,  1,  1,  1],
                    [-1, -1, -1, -1,  1, -1,  1,  0,  1,  1],
                    [-1, -1, -1,  1, -1,  1,  1,  1,  0,  1],
                    [ 1,  1, -1, -1,  1,  1,  1,  1,  1,  0]], dtype=np.float32)
bias = np.zeros(len(weights), dtype=np.float32)

# Save in .npy format for upload to S3
np.save('weights.npy', weights)
np.save('bias.npy', bias)
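Before uploading, it can be worth confirming that the inputs have the shape the solver expects. This is an optional sanity check, not part of the documented workflow:

import numpy as np

weights = np.load('weights.npy')
bias = np.load('bias.npy')

# Ising-style weights: square, symmetric, zero diagonal
assert weights.ndim == 2 and weights.shape[0] == weights.shape[1]
assert np.allclose(weights, weights.T), "weights should be symmetric"
assert not np.any(np.diag(weights)), "diagonal should be zero"
assert bias.shape == (weights.shape[0],), "bias length should match"
print(f"OK: {weights.shape[0]} variables, dtype {weights.dtype}")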
Upload the two files to the source bucket, either from the S3 dashboard on the web console (navigate to your chosen bucket, then click Upload), or programmatically, using access keys belonging to a user with write access to the source bucket (see the Expanding on the Example section):
- Python
- AWS CLI
import boto3

# Credentials of a user with write access to the source bucket
s3_client = boto3.client('s3',
    aws_access_key_id=SUPERVISOR_ACCESS_KEY_ID,
    aws_secret_access_key=SUPERVISOR_SECRET_ACCESS_KEY)

s3_client.upload_file(
    'YOUR_LOCAL_WEIGHTS_FILE.npy',
    'SOURCE_BUCKET_NAME',
    'weights.npy')

s3_client.upload_file(
    'YOUR_LOCAL_BIAS_FILE.npy',
    'SOURCE_BUCKET_NAME',
    'bias.npy')
Assuming the AWS CLI is installed and your supervisor user's credentials are present either as environment variables or in ~/.aws/credentials:
#!/bin/bash
aws s3 cp YOUR_LOCAL_WEIGHTS_FILE.npy s3://SOURCE_BUCKET_NAME/weights.npy
aws s3 cp YOUR_LOCAL_BIAS_FILE.npy s3://SOURCE_BUCKET_NAME/bias.npy
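Either way, you can confirm the upload by listing the bucket contents. A minimal boto3 sketch, reusing the supervisor credentials from above:

import boto3

s3_client = boto3.client('s3',
    aws_access_key_id=SUPERVISOR_ACCESS_KEY_ID,
    aws_secret_access_key=SUPERVISOR_SECRET_ACCESS_KEY)

# Both weights.npy and bias.npy should appear in the listing
resp = s3_client.list_objects_v2(Bucket='SOURCE_BUCKET_NAME')
for obj in resp.get('Contents', []):
    print(obj['Key'], obj['Size'])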
Issuing a solve request
Submitting a request to the TitanQ API involves preparing a JSON body and some HTTP header parameters.
In the example below, make sure to fill in:
- your API key (API_KEY)
- your read user credentials (READ_USER_*)
- your source bucket name (SOURCE_BUCKET_NAME)
- your write user credentials (WRITE_USER_*)
- your destination bucket name (DEST_BUCKET_NAME)
- your supervisor user credentials (SUPERVISOR_*)
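Pasting these values directly into the example is fine for a quickstart, but hard-coded credentials are easy to leak. As an alternative, here is a minimal sketch that reads them from environment variables (the variable names are our own, hypothetical choices):

import os

# Hypothetical environment variable names; adapt them to your setup
API_KEY = os.environ['TITANQ_API_KEY']
READ_USER_ACCESS_KEY_ID = os.environ['READ_USER_ACCESS_KEY_ID']
READ_USER_SECRET_ACCESS_KEY = os.environ['READ_USER_SECRET_ACCESS_KEY']
WRITE_USER_ACCESS_KEY = os.environ['WRITE_USER_ACCESS_KEY']
WRITE_USER_SECRET_ACCESS_KEY = os.environ['WRITE_USER_SECRET_ACCESS_KEY']
SOURCE_BUCKET_NAME = os.environ['SOURCE_BUCKET_NAME']
DEST_BUCKET_NAME = os.environ['DEST_BUCKET_NAME']

The examples below use string placeholders instead, so they remain copy-paste friendly.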
These problems were originally formulated as bipolar problems; here, we solve them as binary. To work in the original bipolar-native setting, please use the bipolar-to-binary converter provided in the TitanQ SDK (the underlying transformation is sketched below).
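For reference, the standard bipolar-to-binary (Ising-to-QUBO) transformation substitutes s = 2x - 1 to map spins s in {-1, +1} onto bits x in {0, 1}. The sketch below is not the SDK converter's API; it assumes the energy convention E(s) = 0.5 * s^T W s + b^T s and a symmetric W with zero diagonal (as in these examples):

import numpy as np

def bipolar_to_binary(weights, bias):
    """Map an Ising problem over s in {-1,+1} to an equivalent binary
    problem over x in {0,1} via s = 2x - 1, assuming the convention
    E(s) = 0.5 * s @ W @ s + b @ s, with W symmetric and zero-diagonal.
    Returns (w_bin, b_bin, offset) such that
    E_bin(x) = 0.5 * x @ w_bin @ x + b_bin @ x + offset equals E(s).
    """
    w_bin = 4.0 * weights
    b_bin = 2.0 * bias - 2.0 * weights.sum(axis=1)
    offset = 0.5 * weights.sum() - bias.sum()
    return w_bin, b_bin, offset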
- N10
- N150
- K2000
import io
import json
import numpy as np
import requests
import time
import zipfile
import boto3
def main():
    hdr = {
        'content-type': 'application/json',
        'authorization': 'API_KEY'
    }

    titanq_req_body = {
        'input': {
            'bias_file_name': 'bias.npy',
            'weights_file_name': 'weights.npy',
            's3': {
                'bucket_name': 'SOURCE_BUCKET_NAME',
                'access_key_id': 'READ_USER_ACCESS_KEY_ID',
                'secret_access_key': 'READ_USER_SECRET_ACCESS_KEY'
            }
        },
        'output': {
            's3': {
                'bucket_name': 'DEST_BUCKET_NAME',
                'access_key_id': 'WRITE_USER_ACCESS_KEY',
                'secret_access_key': 'WRITE_USER_SECRET_ACCESS_KEY'
            },
            'result_archive_file_name': 'titanq/quickstart_result.zip'
        },
        'parameters': {
            'beta': (1/(np.linspace(0.5, 100, 8, dtype=np.float32))).tolist(),
            'coupling_mult': 0.5,
            'num_chains': 8,
            'num_engines': 4,
            'timeout_in_secs': 0.1,
            'variable_types': 'b'*10,
        }
    }

    try:
        resp = requests.post(
            'https://titanq.infinityq.io/v1/solve',
            headers=hdr,
            data=json.dumps(titanq_req_body))

        if 200 <= resp.status_code < 300:
            # Parse the request's confirmation
            titanq_response_body = json.loads(resp.content)
            message = titanq_response_body['message']
            status = titanq_response_body['status']
            computation_id = titanq_response_body['computation_id']

            # Wait for the computation to complete.
            # Guesstimate, assuming no delay in queue, etc.
            time.sleep(titanq_req_body['parameters']['timeout_in_secs'] + 3)

            # Using an AWS user with read access to the destination bucket
            s3_client = boto3.client('s3',
                aws_access_key_id='SUPERVISOR_ACCESS_KEY_ID',
                aws_secret_access_key='SUPERVISOR_SECRET_ACCESS_KEY')

            # Fetch the result archive file
            remote_object = s3_client.get_object(
                Bucket=titanq_req_body['output']['s3']['bucket_name'],
                Key=titanq_req_body['output']['result_archive_file_name']
            )

            # In-memory buffer
            buff = io.BytesIO(remote_object['Body'].read())

            # Unzip to the current directory
            with zipfile.ZipFile(buff, 'r') as zip_file:
                zip_file.extractall(".")

            # Inspect results
            with open('metrics.json', 'r') as metrics_file:
                metrics = json.load(metrics_file)

            print("-" * 15, "+", "-" * 22, sep="")
            print("Ising energy   | Expected Ising energy")
            print("-" * 15, "+", "-" * 22, sep="")
            for ising_energy in metrics['ising_energy']:
                print(f"{ising_energy: <14f} | -35")
        else:
            print('Request yielded HTTP {}'.format(resp.status_code))
    except Exception as e:
        print('Exception ', e)

if __name__ == '__main__':
    main()
import io
import json
import numpy as np
import requests
import time
import zipfile
import boto3
def main():
    hdr = {
        'content-type': 'application/json',
        'authorization': 'API_KEY'
    }

    titanq_req_body = {
        'input': {
            'bias_file_name': 'N150-bias.npy',
            'weights_file_name': 'N150-weights.npy',
            's3': {
                'bucket_name': 'SOURCE_BUCKET_NAME',
                'access_key_id': 'READ_USER_ACCESS_KEY_ID',
                'secret_access_key': 'READ_USER_SECRET_ACCESS_KEY'
            }
        },
        'output': {
            's3': {
                'bucket_name': 'DEST_BUCKET_NAME',
                'access_key_id': 'WRITE_USER_ACCESS_KEY',
                'secret_access_key': 'WRITE_USER_SECRET_ACCESS_KEY'
            },
            'result_archive_file_name': 'titanq/quickstart_result.zip'
        },
        'parameters': {
            'beta': (1/(np.linspace(0.5, 40, 16, dtype=np.float32))).tolist(),
            'coupling_mult': 0.5,
            'num_chains': 16,
            'num_engines': 4,
            'timeout_in_secs': 0.5,
            'variable_types': 'b'*150,
        }
    }

    try:
        resp = requests.post(
            'https://titanq.infinityq.io/v1/solve',
            headers=hdr,
            data=json.dumps(titanq_req_body))

        if 200 <= resp.status_code < 300:
            # Parse the request's confirmation
            titanq_response_body = json.loads(resp.content)
            message = titanq_response_body['message']
            status = titanq_response_body['status']
            computation_id = titanq_response_body['computation_id']

            # Wait for the computation to complete.
            # Guesstimate, assuming no delay in queue, etc.
            time.sleep(titanq_req_body['parameters']['timeout_in_secs'] + 3)

            # Using an AWS user with read access to the destination bucket
            s3_client = boto3.client('s3',
                aws_access_key_id='SUPERVISOR_ACCESS_KEY_ID',
                aws_secret_access_key='SUPERVISOR_SECRET_ACCESS_KEY')

            # Fetch the result archive file
            remote_object = s3_client.get_object(
                Bucket=titanq_req_body['output']['s3']['bucket_name'],
                Key=titanq_req_body['output']['result_archive_file_name']
            )

            # In-memory buffer
            buff = io.BytesIO(remote_object['Body'].read())

            # Unzip to the current directory
            with zipfile.ZipFile(buff, 'r') as zip_file:
                zip_file.extractall(".")

            # Inspect results
            with open('metrics.json', 'r') as metrics_file:
                metrics = json.load(metrics_file)

            print("-" * 15, "+", "-" * 22, sep="")
            print("Ising energy   | Expected Ising energy")
            print("-" * 15, "+", "-" * 22, sep="")
            for ising_energy in metrics['ising_energy']:
                print(f"{ising_energy: <14f} | -2657")
        else:
            print('Request yielded HTTP {}'.format(resp.status_code))
    except Exception as e:
        print('Exception ', e)

if __name__ == '__main__':
    main()
import io
import json
import numpy as np
import requests
import time
import zipfile
import boto3
def main():
    hdr = {
        'content-type': 'application/json',
        'authorization': 'API_KEY'
    }

    titanq_req_body = {
        'input': {
            'bias_file_name': 'K2000-bias.npy',
            'weights_file_name': 'K2000-weights.npy',
            's3': {
                'bucket_name': 'SOURCE_BUCKET_NAME',
                'access_key_id': 'READ_USER_ACCESS_KEY_ID',
                'secret_access_key': 'READ_USER_SECRET_ACCESS_KEY'
            }
        },
        'output': {
            's3': {
                'bucket_name': 'DEST_BUCKET_NAME',
                'access_key_id': 'WRITE_USER_ACCESS_KEY',
                'secret_access_key': 'WRITE_USER_SECRET_ACCESS_KEY'
            },
            'result_archive_file_name': 'titanq/quickstart_result.zip'
        },
        'parameters': {
            'beta': (1/(np.linspace(2, 50, 32, dtype=np.float32))).tolist(),
            'coupling_mult': 0.13,
            'num_chains': 32,
            'num_engines': 4,
            'timeout_in_secs': 10.0,
            'variable_types': 'b'*2000,
        }
    }

    try:
        resp = requests.post(
            'https://titanq.infinityq.io/v1/solve',
            headers=hdr,
            data=json.dumps(titanq_req_body))

        if 200 <= resp.status_code < 300:
            # Parse the request's confirmation
            titanq_response_body = json.loads(resp.content)
            message = titanq_response_body['message']
            status = titanq_response_body['status']
            computation_id = titanq_response_body['computation_id']

            # Wait for the computation to complete.
            # Guesstimate, assuming no delay in queue, etc.
            time.sleep(titanq_req_body['parameters']['timeout_in_secs'] + 10)

            # Using an AWS user with read access to the destination bucket
            s3_client = boto3.client('s3',
                aws_access_key_id='SUPERVISOR_ACCESS_KEY_ID',
                aws_secret_access_key='SUPERVISOR_SECRET_ACCESS_KEY')

            # Fetch the result archive file
            remote_object = s3_client.get_object(
                Bucket=titanq_req_body['output']['s3']['bucket_name'],
                Key=titanq_req_body['output']['result_archive_file_name']
            )

            # In-memory buffer
            buff = io.BytesIO(remote_object['Body'].read())

            # Unzip to the current directory
            with zipfile.ZipFile(buff, 'r') as zip_file:
                zip_file.extractall(".")

            # Inspect results
            with open('metrics.json', 'r') as metrics_file:
                metrics = json.load(metrics_file)

            print("-" * 15, "+", "-" * 22, sep="")
            print("Ising energy   | Expected Ising energy")
            print("-" * 15, "+", "-" * 22, sep="")
            for ising_energy in metrics['ising_energy']:
                print(f"{ising_energy: <14f} | -134388")
        else:
            print('Request yielded HTTP {}'.format(resp.status_code))
    except Exception as e:
        print('Exception ', e)

if __name__ == '__main__':
    main()
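In all three examples, the fixed time.sleep is only a guesstimate: if the request spends time in a queue, the result archive may not exist yet when get_object runs. A more robust alternative is to poll the destination bucket until the archive appears. The wait_for_result helper below is our own sketch, built on boto3's head_object:

import time
import botocore.exceptions

def wait_for_result(s3_client, bucket, key, poll_secs=2.0, max_wait_secs=300.0):
    """Poll the destination bucket until the result archive exists."""
    deadline = time.monotonic() + max_wait_secs
    while time.monotonic() < deadline:
        try:
            s3_client.head_object(Bucket=bucket, Key=key)
            return True  # archive is available
        except botocore.exceptions.ClientError as e:
            # A 404 means "not there yet"; re-raise anything else
            if e.response['Error']['Code'] != '404':
                raise
        time.sleep(poll_secs)
    return False

Calling this in place of the fixed sleep (after creating s3_client) makes the examples resilient to queue delays.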