Skip to content

Commit

Permalink
Merge branch 'develop' into dependabot/npm_and_yarn/covalent_ui/webapp/ip-1.1.9
Browse files Browse the repository at this point in the history
  • Loading branch information
wjcunningham7 committed Jun 10, 2024
2 parents 1e9aa96 + 50440bc commit 701d310
Show file tree
Hide file tree
Showing 63 changed files with 420 additions and 1,079 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/boilerplate.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
fetch-depth: 0
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v31
uses: tj-actions/changed-files@v41
with:
files: |
**/*.py
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/requirements.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ jobs:
run: python -m pip install pip-check-reqs

- name: Check extra core requirements
run: pip-extra-reqs -r werkzeug -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin
run: pip-extra-reqs -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin

- name: Check missing SDK requirements
run: >
Expand Down
3 changes: 2 additions & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -217,9 +217,10 @@ jobs:
if: env.BUILD_AND_RUN_ALL
id: covalent_start
run: |
export COVALENT_ENABLE_TASK_PACKING=1
covalent db migrate
if [ "${{ matrix.backend }}" = 'dask' ] ; then
COVALENT_ENABLE_TASK_PACKING=1 covalent start -d
covalent start -d
elif [ "${{ matrix.backend }}" = 'local' ] ; then
covalent start --no-cluster -d
else
Expand Down
58 changes: 58 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,64 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [UNRELEASED]

## [0.235.1-rc.0] - 2024-06-10

### Authors

- Santosh kumar <[email protected]>
- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
- Co-authored-by: kessler-frost <[email protected]>


### Fixed

- Ignoring all errors when importing qelectrons instead of only `ImportError`

## [0.235.0-rc.0] - 2024-05-29

### Authors

- Ara Ghukasyan <[email protected]>
- Casey Jao <[email protected]>


### Changed

- Updated Slurm plugin docs to note possible SSH limitation
- Updated Slurm plugin docs to remove `sshproxy` section
- API base endpoint is now configurable from an environment variable
- Removed unused lattice attributes to reduce asset uploads

### Fixed

- Improved handling of Covalent version mismatches between client and
executor environments

### Removed

- Removed obsolete `migrate-pickled-result-object` command

### Operations

- Allow installing a specific commit sha to ease testing

## [0.234.1-rc.0] - 2024-05-10

### Authors

- Andrew S. Rosen <[email protected]>
- Sankalp Sanand <[email protected]>
- Co-authored-by: Alejandro Esquivel <[email protected]>
- Casey Jao <[email protected]>
- Co-authored-by: Santosh kumar <[email protected]>


### Fixed

- Sublattice electron function strings are now parsed correctly
- The keys of dictionary inputs to electrons no longer need to be strings.
- Fixed inaccuracies in task packing exposed by no longer uploading null attributes upon dispatch.

### Operations

- Fixed nightly workflow's calling of other workflows.
Expand Down
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.234.0-rc.0
0.235.1-rc.0
3 changes: 2 additions & 1 deletion covalent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@
from ._workflow.electron import wait # nopycln: import
from .executor.utils import get_context # nopycln: import

with contextlib.suppress(ImportError):
with contextlib.suppress(Exception):
# try to load qelectron modules
from ._workflow.qelectron import qelectron # nopycln: import
from .quantum import QCluster # nopycln: import

Expand Down
10 changes: 5 additions & 5 deletions covalent/_api/apiclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def __init__(self, dispatcher_addr: str, adapter: HTTPAdapter = None, auto_raise
self.adapter = adapter
self.auto_raise = auto_raise

def prepare_headers(self, **kwargs):
def prepare_headers(self, kwargs):
extra_headers = CovalentAPIClient.get_extra_headers()
headers = kwargs.get("headers", {})
if headers:
Expand All @@ -42,7 +42,7 @@ def prepare_headers(self, **kwargs):
return headers

def get(self, endpoint: str, **kwargs):
headers = self.prepare_headers(**kwargs)
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
Expand All @@ -62,7 +62,7 @@ def get(self, endpoint: str, **kwargs):
return r

def put(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
Expand All @@ -81,7 +81,7 @@ def put(self, endpoint: str, **kwargs):
return r

def post(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
Expand All @@ -100,7 +100,7 @@ def post(self, endpoint: str, **kwargs):
return r

def delete(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
Expand Down
20 changes: 15 additions & 5 deletions covalent/_dispatcher_plugins/local.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
from copy import deepcopy
from functools import wraps
Expand Down Expand Up @@ -47,6 +48,9 @@
dispatch_cache_dir.mkdir(parents=True, exist_ok=True)


BASE_ENDPOINT = os.getenv("COVALENT_DISPATCH_BASE_ENDPOINT", "/api/v2/dispatches")


def get_redispatch_request_body_v2(
dispatch_id: str,
staging_dir: str,
Expand Down Expand Up @@ -540,10 +544,10 @@ def register_manifest(
dispatcher_addr = format_server_url()

stripped = strip_local_uris(manifest) if push_assets else manifest
endpoint = "/api/v2/dispatches"
endpoint = BASE_ENDPOINT

if parent_dispatch_id:
endpoint = f"{endpoint}/{parent_dispatch_id}/subdispatches"
endpoint = f"{BASE_ENDPOINT}/{parent_dispatch_id}/sublattices"

r = APIClient(dispatcher_addr).post(endpoint, data=stripped.model_dump_json())
r.raise_for_status()
Expand Down Expand Up @@ -596,15 +600,15 @@ def _upload(assets: List[AssetSchema]):
number_uploaded = 0
for i, asset in enumerate(assets):
if not asset.remote_uri or not asset.uri:
app_log.debug(f"Skipping asset {i+1} out of {total}")
app_log.debug(f"Skipping asset {i + 1} out of {total}")
continue
if asset.remote_uri.startswith(local_scheme_prefix):
copy_file_locally(asset.uri, asset.remote_uri)
number_uploaded += 1
else:
_upload_asset(asset.uri, asset.remote_uri)
number_uploaded += 1
app_log.debug(f"Uploaded asset {i+1} out of {total}.")
app_log.debug(f"Uploaded asset {i + 1} out of {total}.")
app_log.debug(f"uploaded {number_uploaded} assets.")


Expand All @@ -615,6 +619,7 @@ def _upload_asset(local_uri, remote_uri):
else:
local_path = local_uri

filesize = os.path.getsize(local_path)
with open(local_path, "rb") as reader:
app_log.debug(f"uploading to {remote_uri}")
f = furl(remote_uri)
Expand All @@ -624,6 +629,11 @@ def _upload_asset(local_uri, remote_uri):
dispatcher_addr = f"{scheme}://{host}:{port}"
endpoint = str(f.path)
api_client = APIClient(dispatcher_addr)
if f.query:
endpoint = f"{endpoint}?{f.query}"

# Workaround for Requests bug when streaming from empty files
data = reader.read() if filesize < 50 else reader

r = api_client.put(endpoint, data=reader)
r = api_client.put(endpoint, headers={"Content-Length": str(filesize)}, data=data)
r.raise_for_status()
33 changes: 1 addition & 32 deletions covalent/_results_manager/result.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
import os
import re
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Set, Union
from typing import TYPE_CHECKING, Any, Dict, List, Union

from .._shared_files import logger
from .._shared_files.config import get_config
Expand Down Expand Up @@ -516,34 +516,3 @@ def _convert_to_electron_result(self) -> Any:
"""

return self._result


def _filter_cova_decorators(function_string: str, cova_imports: Set[str]) -> str:
"""
Given a string representing a function, comment out any Covalent-related decorators.
Args
function_string: A string representation of a workflow function.
Returns:
The function string with Covalent-related decorators commented out.
"""

has_cova_decorator = False
in_decorator = 0
function_lines = function_string.split("\n")
for i in range(len(function_lines)):
line = function_lines[i].strip()
if in_decorator > 0:
function_lines[i] = f"# {function_lines[i]}"
in_decorator += line.count("(")
in_decorator -= line.count(")")
elif line.startswith("@"):
decorator_name = line.split("@")[1].split(".")[0].split("(")[0]
if decorator_name in cova_imports:
function_lines[i] = f"# {function_lines[i]}"
has_cova_decorator = True
in_decorator += line.count("(")
in_decorator -= line.count(")")

return "\n".join(function_lines) if has_cova_decorator else function_string
4 changes: 2 additions & 2 deletions covalent/_serialize/electron.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,8 +210,8 @@ def _get_node_custom_assets(node_attrs: dict) -> Dict[str, AssetSchema]:
def serialize_node(node_id: int, node_attrs: dict, node_storage_path) -> ElectronSchema:
meta = _serialize_node_metadata(node_attrs, node_storage_path)
assets = _serialize_node_assets(node_attrs, node_storage_path)
custom_assets = _get_node_custom_assets(node_attrs)
return ElectronSchema(id=node_id, metadata=meta, assets=assets, custom_assets=custom_assets)
assets._custom = _get_node_custom_assets(node_attrs)
return ElectronSchema(id=node_id, metadata=meta, assets=assets)


def deserialize_node(e: ElectronSchema, metadata_only: bool = False) -> dict:
Expand Down
49 changes: 2 additions & 47 deletions covalent/_serialize/lattice.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,6 @@
"workflow_function_string": AssetType.TEXT,
"doc": AssetType.TEXT,
"inputs": AssetType.TRANSPORTABLE,
"named_args": AssetType.TRANSPORTABLE,
"named_kwargs": AssetType.TRANSPORTABLE,
"cova_imports": AssetType.JSONABLE,
"lattice_imports": AssetType.TEXT,
"hooks": AssetType.JSONABLE,
}

Expand Down Expand Up @@ -112,33 +108,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets:
lat.inputs, ASSET_TYPES["inputs"], storage_path, ASSET_FILENAME_MAP["inputs"]
)

# Deprecate
named_args_asset = save_asset(
lat.named_args,
ASSET_TYPES["named_args"],
storage_path,
ASSET_FILENAME_MAP["named_args"],
)
named_kwargs_asset = save_asset(
lat.named_kwargs,
ASSET_TYPES["named_kwargs"],
storage_path,
ASSET_FILENAME_MAP["named_kwargs"],
)
cova_imports_asset = save_asset(
lat.cova_imports,
ASSET_TYPES["cova_imports"],
storage_path,
ASSET_FILENAME_MAP["cova_imports"],
)
lattice_imports_asset = save_asset(
lat.lattice_imports,
ASSET_TYPES["lattice_imports"],
storage_path,
ASSET_FILENAME_MAP["lattice_imports"],
)

# NOTE: these are actually JSONable
hooks_asset = save_asset(
lat.metadata["hooks"],
ASSET_TYPES["hooks"],
Expand All @@ -151,10 +120,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets:
workflow_function_string=workflow_func_str_asset,
doc=docstring_asset,
inputs=inputs_asset,
named_args=named_args_asset,
named_kwargs=named_kwargs_asset,
cova_imports=cova_imports_asset,
lattice_imports=lattice_imports_asset,
hooks=hooks_asset,
)

Expand All @@ -166,20 +131,12 @@ def _deserialize_lattice_assets(assets: LatticeAssets) -> dict:
)
doc = load_asset(assets.doc, ASSET_TYPES["doc"])
inputs = load_asset(assets.inputs, ASSET_TYPES["inputs"])
named_args = load_asset(assets.named_args, ASSET_TYPES["named_args"])
named_kwargs = load_asset(assets.named_kwargs, ASSET_TYPES["named_kwargs"])
cova_imports = load_asset(assets.cova_imports, ASSET_TYPES["cova_imports"])
lattice_imports = load_asset(assets.lattice_imports, ASSET_TYPES["lattice_imports"])
hooks = load_asset(assets.hooks, ASSET_TYPES["hooks"])
return {
"workflow_function": workflow_function,
"workflow_function_string": workflow_function_string,
"__doc__": doc,
"inputs": inputs,
"named_args": named_args,
"named_kwargs": named_kwargs,
"cova_imports": cova_imports,
"lattice_imports": lattice_imports,
"metadata": {
"hooks": hooks,
},
Expand All @@ -194,12 +151,10 @@ def _get_lattice_custom_assets(lat: Lattice) -> Dict[str, AssetSchema]:
def serialize_lattice(lat, storage_path: str) -> LatticeSchema:
meta = _serialize_lattice_metadata(lat)
assets = _serialize_lattice_assets(lat, storage_path)
custom_assets = _get_lattice_custom_assets(lat)
assets._custom = _get_lattice_custom_assets(lat)
tg = serialize_transport_graph(lat.transport_graph, storage_path)

return LatticeSchema(
metadata=meta, assets=assets, custom_assets=custom_assets, transport_graph=tg
)
return LatticeSchema(metadata=meta, assets=assets, transport_graph=tg)


def deserialize_lattice(model: LatticeSchema) -> Lattice:
Expand Down
Loading

0 comments on commit 701d310

Please sign in to comment.