Never use DH cipher and remove several warnings (#30)
adrien-berchet authored Jun 17, 2023
1 parent 3c8c2ac commit c3d1d5f
Showing 5 changed files with 22 additions and 24 deletions.
16 changes: 7 additions & 9 deletions morphapi/api/neuromorphorg.py
@@ -1,14 +1,12 @@
 import logging
 import os
-from functools import partial

 from morphapi.morphology.morphology import Neuron
 from morphapi.paths_manager import Paths
 from morphapi.utils.webqueries import connected_to_internet
 from morphapi.utils.webqueries import request

 logger = logging.getLogger(__name__)
-request_no_ssl = partial(request, verify=False)


 class NeuroMorpOrgAPI(Paths):
@@ -26,7 +24,7 @@ def __init__(self, *args, **kwargs):

         # Check that neuromorpho.org is not down
         try:
-            request_no_ssl("https://neuromorpho.org/api/health")
+            request("https://neuromorpho.org/api/health")
         except Exception as e:
             raise ConnectionError(
                 f"It seems that neuromorphos API is down: {e}"
@@ -40,7 +38,7 @@ def fields(self):
         Fields contains the types of fields that can be used to restrict queries
         """
         if self._fields is None:
-            self._fields = request_no_ssl(self._base_url + "/fields").json()[
+            self._fields = request(self._base_url + "/fields").json()[
                 "Neuron Fields"
             ]
         return self._fields
@@ -53,7 +51,7 @@ def get_fields_values(self, field):
         max_page = 1
         values = []
         while current_page < max_page:
-            req = request_no_ssl(
+            req = request(
                 self._base_url
                 + f"/fields/{field}?&size=1000&page={current_page}"
             ).json()
@@ -100,7 +98,7 @@ def get_neurons_metadata(self, size=100, page=0, **criteria):
url += f"&size={int(size)}&page={int(page)}"

try:
req = request_no_ssl(url)
req = request(url)
neurons = req.json()
valid_url = req.ok and "error" not in neurons
except ValueError:
@@ -137,13 +135,13 @@ def get_neuron_by_id(self, nid):
"""
Get a neuron's metadata given it's id number
"""
return request_no_ssl(self._base_url + f"/id/{nid}").json()
return request(self._base_url + f"/id/{nid}").json()

def get_neuron_by_name(self, nname):
"""
Get a neuron's metadata given it's name
"""
return request_no_ssl(self._base_url + f"/name/{nname}").json()
return request(self._base_url + f"/name/{nname}").json()

def build_filepath(self, neuron_id):
"""
@@ -202,7 +200,7 @@ class is used to download neurons for other APIs
url = f"https://neuromorpho.org/dableFiles/{neuron['archive'].lower()}/{self._version}/{neuron['neuron_name']}.swc"

try:
req = request_no_ssl(url)
req = request(url)
with open(filepath, "w") as f:
f.write(req.content.decode("utf-8"))
except ValueError as exc:
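
For context, a minimal usage sketch of the client after this change (assumes morphapi is installed and neuromorpho.org is reachable; the field name and neuron id are illustrative):

from morphapi.api.neuromorphorg import NeuroMorpOrgAPI

api = NeuroMorpOrgAPI()                     # the constructor now pings /api/health via request()
species = api.get_fields_values("species")  # paginated values for a single metadata field
neuron = api.get_neuron_by_id(1)            # metadata for one neuron, fetched via plain request()
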
8 changes: 4 additions & 4 deletions morphapi/morphology/morphology.py
@@ -112,7 +112,7 @@ def load_from_swc(self):

         self.repair_swc_file()

-        nrn = nm.load_neuron(self.data_file)
+        nrn = nm.load_morphology(self.data_file)

         # Get position and radius of some
         soma_pos = nrn.soma.points[0, :3]
@@ -247,7 +247,7 @@ def create_mesh(
             pos=coords,
             r=self.points["soma"].radius * soma_radius,
             c=soma_color,
-        ).computeNormals()
+        ).compute_normals()
         neurites["soma"] = soma.clone().c(soma_color)

         # Create neurites actors
@@ -273,15 +273,15 @@ def create_mesh(
             if actors:
                 neurites[ntype] = merge(
                     actors
-                ).computeNormals()  # .smoothMLS2D(f=0.1)
+                ).compute_normals()  # .smoothMLS2D(f=0.1)
             else:
                 neurites[ntype] = None

         # Merge actors to get the entire neuron
         actors = [
             act.clone() for act in neurites.values() if act is not None
         ]
-        whole_neuron = merge(actors).clean().computeNormals()
+        whole_neuron = merge(actors).clean().compute_normals()

         # Write to cache
         to_write = neurites.copy()
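
The two renames in this file track third-party API changes rather than new behaviour. A minimal sketch of the new spellings (assumes neurom>=3 and a recent vedo; the SWC path and sphere parameters are illustrative):

import neurom as nm
from vedo import Sphere

morph = nm.load_morphology("cell.swc")               # neurom>=3 name for the former load_neuron
soma = Sphere(pos=(0, 0, 0), r=5).compute_normals()  # vedo's snake_case rename of computeNormals
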
15 changes: 6 additions & 9 deletions morphapi/utils/webqueries.py
@@ -21,10 +21,9 @@ def request(url, verify=True):

     try:
         _DEFAULT_CIPHERS = requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS
-        if not verify:
-            requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = (
-                _DEFAULT_CIPHERS + ":HIGH:!DH:!aNULL"
-            )
+        requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = (
+            _DEFAULT_CIPHERS + ":HIGH:!DH:!aNULL"
+        )
         response = requests.get(url, verify=verify)
     finally:
         requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = _DEFAULT_CIPHERS
@@ -48,9 +47,7 @@ def query_mouselight(query):
"You need an internet connection for API queries, sorry."
)

base_url = "https://ml-neuronbrowser.janelia.org/"

full_query = base_url + query
full_query = mouselight_base_url + query

# send the query, package the return argument as a json tree
response = requests.get(full_query)
@@ -108,8 +105,8 @@ def post_mouselight(url, query=None, clean=False, attempts=3):
     if request is None:
         raise ConnectionError(
             "\n\nMouseLight API query failed with error message:\n{}.\
-            \nPerhaps the server is down, visit 'https://ml-neuronbrowser.janelia.org' to find out.".format(
-                exception
+            \nPerhaps the server is down, visit '{}' to find out.".format(
+                exception, mouselight_base_url
             )
         )
     else:
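
This hunk is the core of the commit: the DH-free cipher string is now applied on every request, not only when verify=False. A standalone sketch of the same pattern (assumes urllib3 < 2, which still exposes DEFAULT_CIPHERS; the URL is illustrative). Excluding DH suites is a common workaround for "dh key too small" handshake failures against servers with weak Diffie-Hellman parameters:

import requests

ssl_ = requests.packages.urllib3.util.ssl_
saved_ciphers = ssl_.DEFAULT_CIPHERS
try:
    # Prefer high-strength suites, drop Diffie-Hellman and anonymous ones
    ssl_.DEFAULT_CIPHERS = saved_ciphers + ":HIGH:!DH:!aNULL"
    response = requests.get("https://neuromorpho.org/api/health")
finally:
    ssl_.DEFAULT_CIPHERS = saved_ciphers  # always restore the module-level default
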
3 changes: 3 additions & 0 deletions pytest.ini
@@ -1,3 +1,6 @@
[pytest]
addopts = --cov=morphapi

+filterwarnings =
+    ignore:distutils Version classes are deprecated:DeprecationWarning
+    ignore:The hookimpl CovPlugin.pytest_.* uses old-style configuration options:pytest.PytestDeprecationWarning
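
Each filterwarnings entry uses the standard warnings-filter syntax, action:message regex:category. A rough Python equivalent of the first entry, for illustration only:

import warnings

warnings.filterwarnings(
    "ignore",
    message="distutils Version classes are deprecated",
    category=DeprecationWarning,
)
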
4 changes: 2 additions & 2 deletions setup.py
@@ -10,13 +10,13 @@
 requirements = [
     "bg_atlasapi",
     "imagecodecs; python_version>='3.9'",
-    "neurom<4",
+    "neurom>=3,<4",
     "numpy",
     "pandas",
     "pyyaml>=5.3",
     "retry",
     "rich",
-    "vedo>=2020.3.3",
+    "vedo>=2021.0.3",
     "vtk",
 ]

