Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bugfix in ExperimentData's handling of metadata save #1229

Merged
merged 10 commits into from
Jul 20, 2023
8 changes: 5 additions & 3 deletions qiskit_experiments/framework/experiment_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import time
import io
import sys
import json
import traceback
import numpy as np
from dateutil import tz
Expand Down Expand Up @@ -1470,7 +1471,7 @@ def _save_experiment_metadata(self, suppress_errors: bool = True) -> None:

if handle_metadata_separately:
self.service.file_upload(
self._db_data.experiment_id, self._metadata_filename, metadata
self._db_data.experiment_id, self._metadata_filename, metadata, json_encoder=self._json_encoder
)
self._db_data.metadata = metadata

Expand All @@ -1483,7 +1484,8 @@ def _save_experiment_metadata(self, suppress_errors: bool = True) -> None:
def _metadata_too_large(self):
"""Determines whether the metadata should be stored in a separate file"""
# currently the entire POST JSON request body is limited by default to 100kb
return sys.getsizeof(self.metadata) > 10000
total_metadata_size = sys.getsizeof(json.dumps(self.metadata, cls=self._json_encoder))
return total_metadata_size > 10000

def save(
self,
Expand Down Expand Up @@ -2057,7 +2059,7 @@ def load(
service = cls.get_service_from_provider(provider)
data = service.experiment(experiment_id, json_decoder=cls._json_decoder)
if service.experiment_has_file(experiment_id, cls._metadata_filename):
metadata = service.file_download(experiment_id, cls._metadata_filename)
metadata = service.file_download(experiment_id, cls._metadata_filename, json_decoder=cls._json_decoder)
data.metadata.update(metadata)
expdata = cls(service=service, db_data=data, provider=provider)

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
---
fixes:
- |
Fixed a bug in :class:`.ExperimentData` which caused experiment saves to the cloud service to fail when the metadata is large.
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ numpy>=1.17
# see https://github.com/cvxpy/cvxpy/issues/2158
scipy>=1.4,<1.11.0
qiskit-terra>=0.24
qiskit-ibm-experiment>=0.3.3
qiskit-ibm-experiment>=0.3.4
matplotlib>=3.4
uncertainties
lmfit
Expand Down
11 changes: 11 additions & 0 deletions test/database_service/test_db_experiment_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -1114,3 +1114,14 @@ def test_getters(self):
metadata = {"_source": "source_data"}
data._db_data.metadata = metadata
self.assertEqual(data.source, "source_data")

def test_metadata_too_large(self):
    """Tests that ExperimentData can detect when the metadata
    should be saved as a separate file"""
    exp_data = ExperimentData()
    # Build metadata well above the ~10kb threshold used by
    # ExperimentData._metadata_too_large() so the check must trigger.
    metadata_size = 100000
    exp_data.metadata["components"] = [
        {"physical_qubits": [0], "device_components": [f"Qubit{n}"]}
        for n in range(metadata_size)
    ]
    self.assertTrue(exp_data._metadata_too_large())