
Commit

Merge pull request #239 from navis-org/drop37
Drop python 3.7, 3.8; add python 3.11
clbarnes authored Jan 9, 2024
2 parents 2a452c3 + c4e2a45 commit 647d688
Showing 7 changed files with 20 additions and 15 deletions.
5 changes: 3 additions & 2 deletions .github/workflows/run-tests.yml
@@ -12,10 +12,9 @@ jobs:
fail-fast: false
matrix:
python-version:
- '3.7'
- '3.8'
- '3.9'
- '3.10'
- '3.11'
igraph: ["igraph", "no-igraph"]
steps:
# # This cancels any such job that is still runnning
@@ -42,6 +41,8 @@ jobs:
run: pip install -e .
- run: pip install python-igraph
if: ${{ matrix.igraph == 'igraph' }}
- name: Report dependency versions
run: pip freeze -r requirements.txt
- name: Test
run: |
pytest --cov=./ --verbose
1 change: 1 addition & 0 deletions docs/source/whats_new.rst
@@ -12,6 +12,7 @@ What's new?
-
* - Next
- n/a
- BREAKING: Drop python 3.7 support.
- - :class:`pymaid.neuron_label.NeuronLabeller` added for labelling neurons
like in the CATMAID frontend.
* - 2.4.0
2 changes: 1 addition & 1 deletion pymaid/cluster.py
@@ -400,7 +400,7 @@ def _calc_connectivity_matching_index(neuronA, neuronB, connectivity,

if vertex_score:
# We only need the columns for neuronA and neuronB
this_cn = total[[neuronA, neuronB]]
this_cn = total[[neuronA, neuronB]].astype(float)

# Get min and max between both neurons
this_max = np.max(this_cn, axis=1)
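The functional change here is the explicit cast to float before the row-wise min/max arithmetic. A minimal sketch of the pattern, using a made-up connectivity table (column names and counts are illustrative, not taken from pymaid):

import numpy as np
import pandas as pd

# Hypothetical connectivity table: rows are shared partners, columns are the
# two neurons being compared, values are integer synapse counts.
total = pd.DataFrame({"neuronA": [5, 0, 12], "neuronB": [3, 7, 12]})

# Select only the two columns of interest and cast to float, mirroring the
# patched line, so the element-wise maths below runs in floating point.
this_cn = total[["neuronA", "neuronB"]].astype(float)

# Row-wise max and min across the two neurons, as in the surrounding code.
this_max = np.max(this_cn, axis=1)
this_min = np.min(this_cn, axis=1)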
14 changes: 7 additions & 7 deletions pymaid/fetch/__init__.py
@@ -918,9 +918,9 @@ def get_node_details(x, chunk_size=10000, convert_ts=True, remote_instance=None)
df.rename({'user': 'creator'}, axis='columns', inplace=True)

if convert_ts:
df['creation_time'] = pd.to_datetime(df.creation_time)
df['edition_time'] = pd.to_datetime(df.edition_time)
df['review_times'] = df.review_times.apply(lambda x: [pd.to_datetime(d)
df['creation_time'] = pd.to_datetime(df.creation_time, format="ISO8601")
df['edition_time'] = pd.to_datetime(df.edition_time, format="ISO8601")
df['review_times'] = df.review_times.apply(lambda x: [pd.to_datetime(d, format="ISO8601")
for d in x])

return df
@@ -1360,8 +1360,8 @@ def get_connector_links(x, with_tags=False, chunk_size=50,
df = df[df.connector_id.isin(x.connectors.connector_id)]

# Convert to timestamps
df['creation_time'] = pd.to_datetime(df.creation_time)
df['edition_time'] = pd.to_datetime(df.edition_time)
df['creation_time'] = pd.to_datetime(df.creation_time, format="ISO8601")
df['edition_time'] = pd.to_datetime(df.edition_time, format="ISO8601")

if with_tags:
return df, tags
@@ -4613,7 +4613,7 @@ def get_sampler(x=None, remote_instance=None):

# Convert timestamps
df['creation_time'] = pd.to_datetime(df.creation_time, unit='s', utc=True)
df['edition_time'] = pd.to_datetime(df.creaedition_timetion_time, unit='s', utc=True)
df['edition_time'] = pd.to_datetime(df.edition_time, unit='s', utc=True)

return df

@@ -4646,7 +4646,7 @@ def get_sampler_domains(sampler, remote_instance=None):

# Convert timestamps
df['creation_time'] = pd.to_datetime(df.creation_time, unit='s', utc=True)
df['edition_time'] = pd.to_datetime(df.creaedition_timetion_time, unit='s', utc=True)
df['edition_time'] = pd.to_datetime(df.edition_time, unit='s', utc=True)

return df

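The string-timestamp conversions above now pass format="ISO8601" (the epoch-based sampler timestamps keep unit='s'). This is the special value pandas 2.x accepts for parsing ISO 8601 strings that may mix variants, e.g. with or without fractional seconds or UTC offsets. A minimal sketch, assuming pandas >= 2.0 and made-up CATMAID-style timestamps:

import pandas as pd

# Hypothetical ISO 8601 strings of the kind the CATMAID API returns.
ts = pd.Series([
    "2024-01-09T12:00:00.123456+00:00",
    "2024-01-09T12:05:00+00:00",
])

# With pandas >= 2.0, format="ISO8601" parses any valid ISO 8601 layout in one
# pass instead of inferring a single format from the first element.
parsed = pd.to_datetime(ts, format="ISO8601")
print(parsed)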
4 changes: 4 additions & 0 deletions pymaid/tests/test_pymaid.py
@@ -698,6 +698,10 @@ def test_node_sorting(self):
self.assertIsInstance(ns.graph.node_label_sorting(self.n),
list)

@try_conditions
def test_geodesic_matrix(self):
geo = ns.geodesic_matrix(self.n)


class TestConnectivity(unittest.TestCase):
"""Test connectivity-related functions."""
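The new test_geodesic_matrix is a pure smoke test: it only checks that the call returns without raising. A slightly stricter variant, assuming ns.geodesic_matrix returns a pandas DataFrame (as navis does) and that pandas is importable as pd in the test module, could look like this (not part of the commit):

@try_conditions
def test_geodesic_matrix(self):
    geo = ns.geodesic_matrix(self.n)
    # Hypothetical extra check, not in the commit: verify the return type.
    self.assertIsInstance(geo, pd.DataFrame)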
2 changes: 1 addition & 1 deletion pymaid/user_stats.py
@@ -365,7 +365,7 @@ def get_team_contributions(teams, neurons=None, remote_instance=None):

# Add column with just the date and make it the index
all_ts['date'] = all_ts.timestamp.values.astype('datetime64[D]')
all_ts.index = pd.to_datetime(all_ts.date)
all_ts.index = pd.to_datetime(all_ts.date, format="ISO8601")

# Fill in teams for each timestamp based on user + date
all_ts['team'] = None
7 changes: 3 additions & 4 deletions setup.py
@@ -41,14 +41,13 @@
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',

'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
],
install_requires=requirements,
extras_require={'extras': ['fuzzywuzzy[speedup]~=0.17.0',
'ujson~=1.35']},
python_requires='>=3.6',
python_requires='>=3.9',
zip_safe=False
)
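python_requires is the metadata pip consults before installing, so the bump to '>=3.9' is what actually blocks installation on the dropped interpreters. A minimal sketch with the third-party packaging library (an illustration only, not something this commit touches) shows how the new specifier evaluates:

from packaging.specifiers import SpecifierSet

# The constraint introduced by this commit.
spec = SpecifierSet(">=3.9")

print("3.8" in spec)   # False: pip would refuse to install on Python 3.8
print("3.11" in spec)  # True: the newly added interpreter is allowed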
