Skip to content

Commit

Permalink
Expand to copying specs and entries individually
Browse files Browse the repository at this point in the history
  • Loading branch information
bennybp committed Feb 14, 2025
1 parent 84b88bb commit 6f29b6f
Show file tree
Hide file tree
Showing 12 changed files with 225 additions and 59 deletions.
5 changes: 4 additions & 1 deletion qcfractal/qcfractal/components/dataset_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -438,7 +438,7 @@ def clone_dataset_v1(body_data: DatasetCloneBody):
return ds_socket.clone(body_data.source_dataset_id, body_data.new_dataset_name, session=session)


@api_v1.route("/datasets/<string:dataset_type>/<int:dataset_id>/copy_records", methods=["POST"])
@api_v1.route("/datasets/<string:dataset_type>/<int:dataset_id>/copy_from", methods=["POST"])
@wrap_route("WRITE")
def copy_from_dataset_v1(dataset_type: str, dataset_id: int, body_data: DatasetCopyFromBody):
# the dataset_id in the URI is the destination dataset
Expand All @@ -450,5 +450,8 @@ def copy_from_dataset_v1(dataset_type: str, dataset_id: int, body_data: DatasetC
dataset_id,
body_data.entry_names,
body_data.specification_names,
body_data.copy_entries,
body_data.copy_specifications,
body_data.copy_records,
session=session,
)
38 changes: 26 additions & 12 deletions qcfractal/qcfractal/components/dataset_socket.py
Original file line number Diff line number Diff line change
Expand Up @@ -1721,6 +1721,9 @@ def copy_from(
destination_dataset_id: int,
entry_names: Optional[Iterable[str]] = None,
specification_names: Optional[Iterable[str]] = None,
copy_entries: bool = False,
copy_specifications: bool = False,
copy_records: bool = False,
*,
session: Optional[Session] = None,
):
Expand All @@ -1740,6 +1743,14 @@ def copy_from(
Only copy records for these entries. If none, copy records for all entries
specification_names
Only copy records for these specifications. If none, copy records for all specifications
copy_entries
If True, copy entries from the source dataset.
copy_specifications
If True, copy specifications from the source dataset.
copy_records
If True, copy record items from the source dataset. Implies copy_entries and copy_specifications.
session
An existing SQLAlchemy session to use. If None, one will be created. If an existing session
is used, it will be flushed (but not committed) before returning from this function.
Expand All @@ -1755,21 +1766,24 @@ def copy_from(
)

# Copy specifications
self.copy_specifications(
session, source_dataset_id, destination_dataset_id, specification_names=specification_names
)
if copy_specifications or copy_records:
self.copy_specifications(
session, source_dataset_id, destination_dataset_id, specification_names=specification_names
)

# Copy entries
self.copy_entries(session, source_dataset_id, destination_dataset_id, entry_names=entry_names)
if copy_entries or copy_records:
self.copy_entries(session, source_dataset_id, destination_dataset_id, entry_names=entry_names)

# Copy record items
self.copy_record_items(
session,
source_dataset_id,
destination_dataset_id,
entry_names=entry_names,
specification_names=specification_names,
)
if copy_records:
self.copy_record_items(
session,
source_dataset_id,
destination_dataset_id,
entry_names=entry_names,
specification_names=specification_names,
)

def clone(
self,
Expand Down Expand Up @@ -1823,7 +1837,7 @@ def clone(
#################################################
# Copy entries, specifications, and record items
#################################################
self.copy_from(source_dataset_id, new_dataset_id, session=session)
self.copy_from(source_dataset_id, new_dataset_id, copy_records=True, session=session)

#####################
# Contributed values
Expand Down
2 changes: 1 addition & 1 deletion qcfractal/qcfractal/components/test_dataset_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,4 +243,4 @@ def test_dataset_client_copy_from_incompatible(snowflake_client: PortalClient):
ds_2 = snowflake_client.add_dataset("optimization", "Test opt dataset")

with pytest.raises(PortalRequestError, match="does not match destination type"):
ds_2.copy_from(ds_1.id)
ds_2.copy_records_from(ds_1.id)
93 changes: 90 additions & 3 deletions qcportal/qcportal/dataset_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1762,27 +1762,111 @@ def uninvalidate_records(

return ret

def copy_from(
def copy_entries_from(
self,
source_dataset_id: int,
entry_names: Optional[Union[str, Iterable[str]]] = None,
):
"""
Copies entries from another dataset into this one
If entries already exist with the same name, an exception is raised.
Parameters
----------
source_dataset_id
The ID of the dataset to copy entries from
entry_names
Names of the entries to copy. If not provided, all entries will be copied.
"""

self.assert_is_not_view()
self.assert_online()

body_data = DatasetCopyFromBody(
source_dataset_id=source_dataset_id,
entry_names=make_list(entry_names),
copy_entries=True,
)

self._client.make_request(
"post", f"api/v1/datasets/{self.dataset_type}/{self.id}/copy_from", None, body=body_data
)

self.fetch_entry_names()

def copy_specifications_from(
self,
source_dataset_id: int,
specification_names: Optional[Union[str, Iterable[str]]] = None,
):
"""
Copies specifications from another dataset into this one
If specifications already exist with the same name, an exception is raised.
Parameters
----------
source_dataset_id
The ID of the dataset to copy specifications from
specification_names
Names of the specifications to copy. If not provided, all specifications will be copied.
"""
self.assert_is_not_view()
self.assert_online()

body_data = DatasetCopyFromBody(
source_dataset_id=source_dataset_id,
specification_names=make_list(specification_names),
copy_specifications=True,
)

self._client.make_request(
"post", f"api/v1/datasets/{self.dataset_type}/{self.id}/copy_from", None, body=body_data
)

self.fetch_specifications()

def copy_records_from(
self,
source_dataset_id: int,
entry_names: Optional[Union[str, Iterable[str]]] = None,
specification_names: Optional[Union[str, Iterable[str]]] = None,
):
"""
Copies records from another dataset into this one
Entries and specifications will also be copied.
If entries or specifications already exist with the same name, an exception is raised.
This does not actually fully copy records - the records will be linked to both datasets
Parameters
----------
source_dataset_id
The ID of the dataset to copy records from
entry_names
Names of the entries to copy. If not provided, all entries will be copied.
specification_names
Names of the specifications to copy. If not provided, all specifications will be copied.
"""

self.assert_is_not_view()
self.assert_online()

body_data = DatasetCopyFromBody(
source_dataset_id=source_dataset_id,
entry_names=make_list(entry_names),
specification_names=make_list(specification_names),
copy_records=True,
)

self._client.make_request(
"post", f"api/v1/datasets/{self.dataset_type}/{self.id}/copy_records", None, body=body_data
"post", f"api/v1/datasets/{self.dataset_type}/{self.id}/copy_from", None, body=body_data
)

self.fetch_entry_names()
self.fetch_specification_names()
self.fetch_specifications()

def compile_values(
self,
Expand Down Expand Up @@ -2128,6 +2212,9 @@ class DatasetCopyFromBody(RestModelBase):
source_dataset_id: int
entry_names: Optional[List[str]] = None
specification_names: Optional[List[str]] = None
copy_entries: bool = False
copy_specifications: bool = False
copy_records: bool = False


class DatasetFetchRecordsBody(RestModelBase):
Expand Down
Loading

0 comments on commit 6f29b6f

Please sign in to comment.