+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+# http://www.gnu.org/licenses/
+##############################################################################
+
+import os
+import sys
+from logging import info, warning, debug, getLogger, INFO, DEBUG, WARNING
+from json import loads
+from enum import Enum
+from datetime import datetime, timedelta
+from subprocess import check_output, CalledProcessError
+
+datetime_format = '%Y-%m-%dT%H:%M:%SZ'
+
+
+class Action(Enum):
+ r"""
+ Enum for GitHub event ``action``.
+ """
+ opened = 'opened'
+ reopened = 'reopened'
+ closed = 'closed'
+ labeled = 'labeled'
+ unlabeled = 'unlabeled'
+ ready_for_review = 'ready_for_review'
+ synchronize = 'synchronize'
+ review_requested = 'review_requested'
+ converted_to_draft = 'converted_to_draft'
+ submitted = 'submitted'
+
+class RevState(Enum):
+ r"""
+ Enum for GitHub event ``review_state``.
+ """
+ commented = 'commented'
+ approved = 'approved'
+ changes_requested = 'changes_requested'
+
+class ReviewDecision(Enum):
+ r"""
+ Enum for ``gh pr view`` results for ``reviewDecision``.
+ """
+ changes_requested = 'CHANGES_REQUESTED'
+ approved = 'APPROVED'
+ unclear = 'UNCLEAR'
+
+class Priority(Enum):
+ r"""
+ Enum for priority labels.
+ """
+ blocker = 'p: blocker /1'
+ critical = 'p: critical /2'
+ major = 'p: major /3'
+ minor = 'p: minor /4'
+ trivial = 'p: trivial /5'
+
+class State(Enum):
+ r"""
+ Enum for state labels.
+ """
+ positive_review = 's: positive review'
+ needs_work = 's: needs work'
+ needs_review = 's: needs review'
+ needs_info = 's: needs info'
+
+class Resolution(Enum):
+ r"""
+ Enum for resolution labels.
+ """
+ duplicate = 'r: duplicate'
+ invalid = 'r: invalid'
+ wontfix = 'r: wontfix'
+ worksforme = 'r: worksforme'
+
+def selection_list(label):
+ r"""
+    Return the selection list to which ``label`` belongs.
+ """
+ for sel_list in [Priority, State, Resolution]:
+ for item in sel_list:
+ if label == item.value:
+ return sel_list
+ return None
+
+class GhLabelSynchronizer:
+ r"""
+    Handler for access to a GitHub issue or PR via the ``gh`` command line
+    tool on the GitHub Actions runner.
+ """
+ def __init__(self, url, actor):
+ r"""
+        Python constructor sets the issue / PR URL and the actor and
+        initializes the caches.
+ """
+ self._url = url
+ self._actor = actor
+ self._warning_prefix = 'Label Sync Warning:'
+ self._labels = None
+ self._author = None
+ self._draft = None
+ self._open = None
+ self._review_decision = None
+ self._reviews = None
+ self._commits = None
+ self._commit_date = None
+
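+        # the issue / PR number is the last component of the URL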
+ number = os.path.basename(url)
+ self._pr = True
+ self._issue = 'pull request #%s' % number
+ if url.rfind('issue') != -1:
+ self._issue = 'issue #%s' % number
+ self._pr = False
+ info('Create label handler for %s and actor %s' % (self._issue, self._actor))
+ self.clean_warnings()
+
+ # -------------------------------------------------------------------------
+ # methods to obtain properties of the issue
+ # -------------------------------------------------------------------------
+ def is_pull_request(self):
+ r"""
+        Return ``True`` if we are handling a pull request.
+ """
+ return self._pr
+
+ def reset_view(self):
+ r"""
+ Reset cache of ``gh view`` results.
+ """
+ self._labels = None
+ self._author = None
+ self._draft = None
+ self._open = None
+ self._review_decision = None
+ self._reviews = None
+ self._commits = None
+ self._commit_date = None
+
+ def rest_api(self, path_args, method=None, query=''):
+ r"""
+ Return data obtained from ``gh`` command ``api``.
+ """
+ meth = '-X GET'
+ if method:
+            meth = '-X %s' % method
+ cmd = 'gh api %s -H \"Accept: application/vnd.github+json\" %s %s' % (meth, path_args, query)
+ debug('Execute command: %s' % cmd)
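+        # only GET results are parsed as JSON; for other methods (e.g. DELETE)
+        # the raw output is returned, since the response may be empty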
+ if method:
+ return check_output(cmd, shell=True)
+ return loads(check_output(cmd, shell=True))
+
+ def view(self, key):
+ r"""
+ Return data obtained from ``gh`` command ``view``.
+ """
+ issue = 'issue'
+ if self._pr:
+ issue = 'pr'
+ cmd = 'gh %s view %s --json %s' % (issue, self._url, key)
+ debug('Execute command: %s' % cmd)
+ return loads(check_output(cmd, shell=True))[key]
+
+ def is_open(self):
+ r"""
+        Return ``True`` if the issue resp. PR is open.
+ """
+ if self._open is not None:
+ return self._open
+ if self.view('state') == 'OPEN':
+ self._open = True
+ else:
+ self._open = False
+ info('Issue %s is open %s' % (self._issue, self._open))
+ return self._open
+
+ def is_draft(self):
+ r"""
+ Return ``True`` if the PR is a draft.
+ """
+ if self._draft is not None:
+ return self._draft
+ if self.is_pull_request():
+ self._draft = self.view('isDraft')
+ else:
+ self._draft = False
+ info('Issue %s is draft %s' % (self._issue, self._draft))
+ return self._draft
+
+ def is_auth_team_member(self, login):
+ r"""
+ Return ``True`` if the user with given login belongs to an authorized
+ team.
+ """
+ def verify_membership(team):
+ path_args = '/orgs/sagemath/teams/%s/memberships/%s' % (team, login)
+ try:
+ res = self.rest_api(path_args)
+ if res['state'] == 'active' and res['role'] == 'member':
+ info('User %s is a member of %s' % (login, team))
+ return True
+ except CalledProcessError:
+ pass
+
+ info('User %s is not a member of %s' % (login, team))
+ return False
+
+ # check for the Triage team
+ if verify_membership('triage'):
+ return True
+
+ return False
+
+ def actor_authorized(self):
+ r"""
+ Return ``True`` if the actor belongs to an authorized team.
+ """
+ return self.is_auth_team_member(self._actor)
+
+ def clean_warnings(self):
+ r"""
+ Remove all warnings that have been posted by ``GhLabelSynchronizer``
+ more than ``warning_lifetime`` ago.
+ """
+ warning_lifetime = timedelta(minutes=5)
+        time_frame = timedelta(minutes=730) # time span to search for comments: the 12-hour cron cycle plus 10 minutes of overlap
+ per_page = 100
+ today = datetime.today()
+ since = today - time_frame
+ query = '-F per_page=%s -F page={} -f since=%s' % (per_page, since.strftime(datetime_format))
+ s = self._url.split('/')
+ owner = s[3]
+ repo = s[4]
+ path_args = '/repos/%s/%s/issues/comments' % (owner, repo)
+ page = 1
+ comments = []
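+        # page through all comments, since the REST API returns at most
+        # ``per_page`` of them per request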
+ while True:
+ comments_page = self.rest_api(path_args, query=query.format(page))
+ comments += comments_page
+ if len(comments_page) < per_page:
+ break
+ page += 1
+
+ info('Cleaning warning comments since %s (total found %s)' % (since, len(comments)))
+
+ for c in comments:
+ login = c['user']['login']
+ body = c['body']
+ comment_id = c['id']
+ issue = c['issue_url'].split('/').pop()
+ created_at = c['created_at']
+ if login.startswith('github-actions'):
+ debug('github-actions comment %s created at %s on issue %s found' % (comment_id, created_at, issue))
+ if body.startswith(self._warning_prefix):
+ created = datetime.strptime(created_at, datetime_format)
+ lifetime = today - created
+ debug('github-actions %s %s is %s old' % (self._warning_prefix, comment_id, lifetime))
+ if lifetime > warning_lifetime:
+ try:
+ self.rest_api('%s/%s' % (path_args, comment_id), method='DELETE')
+ info('Comment %s on issue %s deleted' % (comment_id, issue))
+ except CalledProcessError:
+ # the comment may have been deleted by a bot running in parallel
+ info('Comment %s on issue %s has been deleted already' % (comment_id, issue))
+
+ def get_labels(self):
+ r"""
+ Return the list of labels of the issue resp. PR.
+ """
+ if self._labels is not None:
+ return self._labels
+ data = self.view('labels')
+ self._labels = [l['name'] for l in data]
+ info('List of labels for %s: %s' % (self._issue, self._labels))
+ return self._labels
+
+ def get_author(self):
+ r"""
+ Return the author of the issue resp. PR.
+ """
+ if self._author is not None:
+ return self._author
+        self._author = self.view('author')['login']
+ info('Author of %s: %s' % (self._issue, self._author))
+ return self._author
+
+ def get_commits(self):
+ r"""
+ Return the list of commits of the PR.
+ """
+ if not self.is_pull_request():
+ return None
+
+ if self._commits is not None:
+ return self._commits
+
+ self._commits = self.view('commits')
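+        # remember the date of the most recent commit; get_reviews uses it to
+        # filter out reviews of older commits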
+        self._commit_date = max(com['committedDate'] for com in self._commits)
+ info('Commits until %s for %s: %s' % (self._commit_date, self._issue, self._commits))
+ return self._commits
+
+ def get_review_decision(self):
+ r"""
+ Return the reviewDecision of the PR.
+ """
+ if not self.is_pull_request():
+ return None
+
+ if self._review_decision is not None:
+ if self._review_decision == ReviewDecision.unclear:
+ return None
+ return self._review_decision
+
+ data = self.view('reviewDecision')
+ if data:
+ self._review_decision = ReviewDecision(data)
+ else:
+ # To separate a not supplied value from not cached (see https://github.com/sagemath/sage/pull/36177#issuecomment-1704022893 ff)
+ self._review_decision = ReviewDecision.unclear
+ info('Review decision for %s: %s' % (self._issue, self._review_decision.value))
+ return self._review_decision
+
+ def get_reviews(self, complete=False):
+ r"""
+        Return the list of reviews of the PR. By default only proper reviews
+        (i.e. not mere comments) submitted after the most recent commit are
+        returned. Use the keyword ``complete`` to get them all.
+ """
+ if not self.is_pull_request():
+ return None
+
+ if self._reviews is None:
+ self._reviews = self.view('reviews')
+ debug('Reviews for %s: %s' % (self._issue, self._reviews))
+
+ if complete or not self._reviews:
+ return self._reviews
+
+ if self._commit_date is None:
+ self.get_commits()
+
+ date = self._commit_date
+ unproper_rev = RevState.commented.value
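+        # submittedAt and the commit date are ISO 8601 strings, so string
+        # comparison matches chronological order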
+ new_revs = [rev for rev in self._reviews if rev['submittedAt'] > date]
+ proper_new_revs = [rev for rev in new_revs if rev['state'] != unproper_rev]
+ info('Proper reviews after %s for %s: %s' % (date, self._issue, proper_new_revs))
+ return proper_new_revs
+
+ def active_partners(self, item):
+ r"""
+ Return the list of other labels from the selection list
+ of the given one that are already present on the issue / PR.
+ """
+ sel_list = type(item)
+ partners = [i for i in sel_list if i != item and i.value in self.get_labels()]
+ info('Active partners of %s: %s' % (item, partners))
+ return partners
+
+ # -------------------------------------------------------------------------
+ # methods to validate the issue state
+ # -------------------------------------------------------------------------
+ def review_comment_to_state(self):
+ r"""
+ Return a State label if the most recent review comment
+ starts with its value.
+ """
+ revs = self.get_reviews(complete=True)
+ date = max(rev['submittedAt'] for rev in revs)
+
+ for rev in revs:
+ if rev['submittedAt'] == date:
+ for stat in State:
+ body = rev['body']
+ if body.startswith(stat.value):
+ return stat
+ return None
+
+ def needs_work_valid(self):
+ r"""
+ Return ``True`` if the PR needs work. This is the case if
+        there are proper reviews more recent than the latest commit and
+        either the review decision requests changes or any of those
+        reviews requests changes.
+ """
+ revs = self.get_reviews()
+ if not revs:
+ # no proper review since most recent commit.
+ return False
+
+ ch_req = ReviewDecision.changes_requested
+ rev_dec = self.get_review_decision()
+ if rev_dec:
+ if rev_dec == ch_req:
+ info('PR %s needs work (by decision)' % self._issue)
+ return True
+ else:
+ info('PR %s doesn\'t need work (by decision)' % self._issue)
+ return False
+
+ if any(rev['state'] == ch_req.value for rev in revs):
+ info('PR %s needs work' % self._issue)
+ return True
+ info('PR %s doesn\'t need work' % self._issue)
+ return False
+
+ def positive_review_valid(self):
+ r"""
+        Return ``True`` if the PR has a positive review. This is the
+        case if there are proper reviews more recent than the latest
+        commit and either the review decision is approved or all of
+        those reviews are approvals.
+ """
+ revs = self.get_reviews()
+ if not revs:
+ # no proper review since most recent commit.
+ return False
+
+ appr = ReviewDecision.approved
+ rev_dec = self.get_review_decision()
+ if rev_dec:
+ if rev_dec == appr:
+                info('PR %s has positive review (by decision)' % self._issue)
+ return True
+ else:
+                info('PR %s doesn\'t have positive review (by decision)' % self._issue)
+ return False
+
+ if all(rev['state'] == appr.value for rev in revs):
+            info('PR %s has positive review' % self._issue)
+ return True
+        info('PR %s doesn\'t have positive review' % self._issue)
+ return False
+
+ def needs_review_valid(self):
+ r"""
+ Return ``True`` if the PR needs review. This is the case if
+        the PR is not a draft and all proper reviews are older than the
+        most recent commit.
+ """
+ if self.is_draft():
+ return False
+
+ if self.needs_work_valid():
+ info('PR %s already under review (needs work)' % self._issue)
+ return False
+
+ if self.positive_review_valid():
+ info('PR %s already reviewed' % self._issue)
+ return False
+
+ info('PR %s needs review' % self._issue)
+ return True
+
+ def approve_allowed(self):
+ r"""
+        Return ``True`` if the actor is allowed to approve this PR, that is,
+        if no other reviewer requests changes and the actor is a valid approver.
+ """
+ revs = self.get_reviews()
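+        # the actor's own reviews are ignored when checking for requested changes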
+ revs = [rev for rev in revs if rev['author']['login'] != self._actor]
+ ch_req = ReviewDecision.changes_requested
+ if any(rev['state'] == ch_req.value for rev in revs):
+            info('PR %s can\'t be approved by %s since others request changes' % (self._issue, self._actor))
+ return False
+
+ return self.actor_valid()
+
+ def actor_valid(self):
+ r"""
+        Return ``True`` if the actor may approve this PR, that is, the actor
+        is not the author or qualifies as a co-author.
+ """
+ author = self.get_author()
+
+ if author != self._actor:
+ info('PR %s can be approved by %s' % (self._issue, self._actor))
+ return True
+
+ revs = self.get_reviews()
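+        # reviews by the github-actions bot do not count as reviews by another person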
+ revs = [rev for rev in revs if rev['author']['login'] != 'github-actions']
+ if not revs:
+ info('PR %s can\'t be approved by the author %s since no other person reviewed it' % (self._issue, self._actor))
+ return False
+
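+        # the author may approve their own PR if at least one other person
+        # committed to it (approval as co-author)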
+ coms = self.get_commits()
+        authors = sum((com['authors'] for com in coms), [])
+        authors = [auth for auth in authors if auth['login'] not in (self._actor, 'github-actions')]
+ if not authors:
+            info('PR %s can\'t be approved by the author %s since no other person committed to it' % (self._issue, self._actor))
+ return False
+
+ info('PR %s can be approved by the author %s as co-author' % (self._issue, self._actor))
+ return True
+
+ # -------------------------------------------------------------------------
+ # methods to change the issue
+ # -------------------------------------------------------------------------
+ def gh_cmd(self, cmd, arg, option):
+ r"""
+        Perform a system call to ``gh`` for ``cmd`` on an issue resp. PR.
+ """
+ issue = 'issue'
+ if self._pr:
+ issue = 'pr'
+ cmd_str = 'gh %s %s %s %s "%s"' % (issue, cmd, self._url, option, arg)
+ debug('Execute command: %s' % cmd_str)
+ ex_code = os.system(cmd_str)
+ if ex_code:
+ warning('Execution of %s failed with exit code: %s' % (cmd_str, ex_code))
+
+ def edit(self, arg, option):
+ r"""
+ Perform a system call to ``gh`` to edit an issue resp. PR.
+ """
+ self.gh_cmd('edit', arg, option)
+
+ def mark_as_ready(self):
+ r"""
+ Perform a system call to ``gh`` to mark a PR as ready for review.
+ """
+ self.gh_cmd('ready', '', '')
+
+ def review(self, arg, text):
+ r"""
+ Perform a system call to ``gh`` to review a PR.
+ """
+ self.gh_cmd('review', arg, '-b \"%s\"' % text)
+
+ def approve(self):
+ r"""
+ Approve the PR by the actor.
+ """
+ self.review('--approve', '%s approved this PR' % self._actor)
+ info('PR %s approved by %s' % (self._issue, self._actor))
+
+ def request_changes(self):
+ r"""
+ Request changes for this PR by the actor.
+ """
+ self.review('--request-changes', '%s requested changes for this PR' % self._actor)
+ info('Changes requested for PR %s by %s' % (self._issue, self._actor))
+
+ def review_comment(self, text):
+ r"""
+ Add a review comment.
+ """
+ self.review('--comment', text)
+ info('Add review comment for PR %s: %s' % (self._issue, text))
+
+ def add_comment(self, text):
+ r"""
+ Perform a system call to ``gh`` to add a comment to an issue or PR.
+ """
+ self.gh_cmd('comment', text, '-b')
+ info('Add comment to %s: %s' % (self._issue, text))
+
+ def add_warning(self, text):
+ r"""
+ Perform a system call to ``gh`` to add a warning to an issue or PR.
+ """
+ self.add_comment('%s %s' % (self._warning_prefix, text))
+
+ def add_label(self, label):
+ r"""
+ Add the given label to the issue or PR.
+ """
+ if not label in self.get_labels():
+ self.edit(label, '--add-label')
+ info('Add label to %s: %s' % (self._issue, label))
+
+ def add_default_label(self, item):
+ r"""
+ Add the given label if there is no active partner.
+ """
+ if not self.active_partners(item):
+ self.add_label(item.value)
+
+ def select_label(self, item):
+ r"""
+ Add the given label and remove all others.
+ """
+ self.add_label(item.value)
+ sel_list = type(item)
+ for other in sel_list:
+ if other != item:
+ self.remove_label(other.value)
+
+ def remove_label(self, label):
+ r"""
+ Remove the given label from the issue or PR of the handler.
+ """
+ if label in self.get_labels():
+ self.edit(label, '--remove-label')
+ info('Remove label from %s: %s' % (self._issue, label))
+
+ def reject_label_addition(self, item):
+ r"""
+        Post a comment that the given label cannot be added and remove
+        it again.
+ """
+ if not self.is_pull_request():
+            self.add_warning('Label *%s* cannot be added to an issue. Please use it on the corresponding PR' % item.value)
+ elif item is State.needs_review:
+            self.add_warning('Label *%s* cannot be added, since there are unresolved reviews' % item.value)
+ else:
+            self.add_warning('Label *%s* cannot be added. Please use the GitHub review functionality' % item.value)
+ self.remove_label(item.value)
+ return
+
+ def reject_label_removal(self, item):
+ r"""
+        Post a comment that the given label cannot be removed and add
+        it back.
+ """
+ if type(item) == State:
+ sel_list = 'state'
+ else:
+ sel_list = 'priority'
+        self.add_warning('Label *%s* cannot be removed. Please add the %s label which should replace it' % (item.value, sel_list))
+ self.add_label(item.value)
+ return
+
+ # -------------------------------------------------------------------------
+ # methods to act on events
+ # -------------------------------------------------------------------------
+ def on_label_add(self, label):
+ r"""
+ Check if the given label belongs to a selection list. If so, remove
+        all other labels of that list. For a state label the corresponding
+        review action is performed as well.
+ """
+ sel_list = selection_list(label)
+ if not sel_list:
+ return
+
+ item = sel_list(label)
+ if label not in self.get_labels():
+ # this is possible if two labels of the same selection list
+ # have been added in one step (via multiple selection in the
+            # pull-down menu). In this case `label` has already been removed
+            # by the `on_label_add` call for the first of the two labels
+ partn = self.active_partners(item)
+ if partn:
+                self.add_warning('Label *%s* cannot be added due to *%s*!' % (label, partn[0].value))
+ else:
+ warning('Label %s of %s not found!' % (label, self._issue))
+ return
+
+ if sel_list is State:
+ if not self.is_pull_request():
+ if item != State.needs_info:
+ self.reject_label_addition(item)
+ return
+
+ if item == State.needs_review:
+ if self.needs_review_valid():
+ # here we come for example after a sequence:
+ # needs review -> needs info -> needs review
+ pass
+ elif self.is_draft():
+ self.mark_as_ready()
+ else:
+ self.reject_label_addition(item)
+ return
+
+ if item == State.needs_work:
+ if self.needs_work_valid():
+ # here we come for example after a sequence:
+ # needs work -> needs info -> needs work
+ pass
+ elif not self.is_draft():
+ self.request_changes()
+ else:
+ self.reject_label_addition(item)
+ return
+
+ if item == State.positive_review:
+ if self.positive_review_valid():
+ # here we come for example after a sequence:
+ # positive review -> needs info -> positive review
+ pass
+ elif self.approve_allowed():
+ self.approve()
+ else:
+ self.reject_label_addition(item)
+ return
+
+ if sel_list is Resolution:
+ self.remove_all_labels_of_sel_list(Priority)
+
+ for other in sel_list:
+ if other != item:
+ self.remove_label(other.value)
+
+ def on_label_removal(self, label):
+ r"""
+ Check if the given label belongs to a selection list. If so, the
+        removal is rejected and a comment is posted asking to add a
+        replacement for ``label`` from the list instead. Exceptions are state
+        labels on issues and ``State.needs_info`` on a PR.
+ """
+ sel_list = selection_list(label)
+ if not sel_list:
+ return
+
+ item = sel_list(label)
+ if sel_list is State:
+ if self.is_pull_request():
+ if item != State.needs_info:
+ self.reject_label_removal(item)
+ elif sel_list is Priority:
+ self.reject_label_removal(item)
+ return
+
+ def on_review_comment(self):
+ r"""
+ Check if the text of the most recent review begins with a
+ specific label name. In this case, simulate the corresponding
+ label addition. This feature is needed for people who don't
+ have permission to add labels (i.e. aren't a member of the
+ Triage team).
+ """
+ rev_state = self.review_comment_to_state()
+ if rev_state in (State.needs_info, State.needs_review):
+ self.select_label(rev_state)
+ self.run(Action.labeled, label=rev_state.value)
+
+ def remove_all_labels_of_sel_list(self, sel_list):
+ r"""
+ Remove all labels of given selection list.
+ """
+ for item in sel_list:
+ self.remove_label(item.value)
+
+ def run(self, action, label=None, rev_state=None):
+ r"""
+ Run the given action.
+ """
+ self.reset_view() # this is just needed for run_tests
+
+ if action is Action.opened and self.is_pull_request():
+ if not self.is_draft():
+ self.add_default_label(State.needs_review)
+
+ if action in (Action.closed, Action.reopened, Action.converted_to_draft):
+ self.remove_all_labels_of_sel_list(State)
+
+ if action is Action.labeled:
+ self.on_label_add(label)
+
+ if action is Action.unlabeled:
+ self.on_label_removal(label)
+
+ if action in (Action.ready_for_review, Action.synchronize):
+ if self.needs_review_valid():
+ self.select_label(State.needs_review)
+
+ if action is Action.review_requested:
+ self.select_label(State.needs_review)
+
+ if action is Action.submitted:
+ rev_state = RevState(rev_state)
+ if rev_state is RevState.approved:
+ if self.actor_authorized() and self.positive_review_valid():
+ self.select_label(State.positive_review)
+
+ if rev_state is RevState.changes_requested:
+ if self.needs_work_valid():
+ self.select_label(State.needs_work)
+
+ if rev_state is RevState.commented:
+ self.on_review_comment()
+
+ def run_tests(self):
+ r"""
+        Simulated run over all possible events.
+
+ This is not intended to validate all functionality. It just
+        tests for bugs when invoking the methods. The result in the
+        issue or PR depends on timing. Note that the GitHub Actions runner
+        may run in parallel on the triggered events, possibly on another
+        version of the code.
+ """
+ self.add_comment('Starting tests for sync_labels')
+ for action in Action:
+ self.add_comment('Test action %s' % action.value)
+ if action in (Action.labeled, Action.unlabeled):
+ for stat in State:
+ if action is Action.labeled:
+ self.add_label(stat.value)
+ else:
+ self.remove_label(stat.value)
+ self.run(action, label=stat.value)
+ for prio in Priority:
+ if action is Action.labeled:
+ self.add_label(prio.value)
+ else:
+ self.remove_label(prio.value)
+ self.run(action, label=prio.value)
+ res = Resolution.worksforme
+ if action is Action.labeled:
+ self.add_label(res.value)
+                    self.run(action, label=res.value)
+ elif action == Action.submitted and self.is_pull_request():
+ for rev_stat in RevState:
+ if rev_stat is RevState.approved:
+ self.approve()
+ self.run(action, rev_state=rev_stat.value)
+ elif rev_stat is RevState.changes_requested:
+ self.request_changes()
+ self.run(action, rev_state=rev_stat.value)
+ elif rev_stat is RevState.commented:
+ for stat in State:
+ self.review_comment(stat.value)
+ self.run(action, rev_state=rev_stat.value)
+ elif self.is_pull_request():
+ self.run(action)
+
+
+###############################################################################
+# Main
+###############################################################################
+last_arg = None
+run_tests = False
+default_actor = 'sagetrac-github-bot'
+cmdline_args = sys.argv[1:]
+num_args = len(cmdline_args)
+
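+# The last command line argument may be an option selecting the log level or
+# the test mode; it is removed before the positional arguments are parsed.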
+if num_args:
+ last_arg = cmdline_args[num_args-1]
+
+if last_arg in ('-t', '--test'):
+ getLogger().setLevel(DEBUG)
+ cmdline_args.pop()
+ run_tests = True
+elif last_arg in ('-d', '--debug'):
+ getLogger().setLevel(DEBUG)
+ cmdline_args.pop()
+elif last_arg in ('-i', '--info'):
+ getLogger().setLevel(INFO)
+ cmdline_args.pop()
+elif last_arg in ('-w', '--warning'):
+ getLogger().setLevel(INFO)
+ info('cmdline_args (%s) %s' % (num_args, cmdline_args))
+ getLogger().setLevel(WARNING)
+ cmdline_args.pop()
+else:
+ getLogger().setLevel(DEBUG)
+
+num_args = len(cmdline_args)
+info('cmdline_args (%s) %s' % (num_args, cmdline_args))
+
+if run_tests and num_args in (1,2):
+ if num_args == 2:
+ url, actor = cmdline_args
+ else:
+ url, = cmdline_args
+ actor = default_actor
+
+ info('url: %s' % url)
+ info('actor: %s' % actor)
+
+ gh = GhLabelSynchronizer(url, actor)
+ gh.run_tests()
+
+elif num_args == 5:
+ action, url, actor, label, rev_state = cmdline_args
+ action = Action(action)
+
+ info('action: %s' % action)
+ info('url: %s' % url)
+ info('actor: %s' % actor)
+ info('label: %s' % label)
+ info('rev_state: %s' % rev_state)
+
+ gh = GhLabelSynchronizer(url, actor)
+ gh.run(action, label=label, rev_state=rev_state)
+
+elif num_args == 1:
+ url, = cmdline_args
+
+ info('url: %s' % url)
+
+ gh = GhLabelSynchronizer(url, default_actor)
+
+else:
+ print('Need 5 arguments to synchronize: action, url, actor, label, rev_state')
+ print('Need 1 argument to clean warning comments: url')
+ print('Need 1 argument to run tests: url')
+ print('The following options may be appended:')
+ print(' -t --test to run the test suite')
+ print(' -i --info to set the log-level to INFO')
+ print(' -d --debug to set the log-level to DEBUG (default)')
+ print(' -w --warning to set the log-level to WARNING')
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 814e410d29f..f8b6b52a890 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -3,6 +3,9 @@ name: Build & Test
on:
pull_request:
push:
+ branches: ['**']
+ # Ignore pushes on tags to prevent two uploads of codecov reports
+ tags-ignore: ['**']
workflow_dispatch:
# Allow to run manually
inputs:
@@ -21,60 +24,140 @@ concurrency:
cancel-in-progress: true
jobs:
+ get_ci_fixes:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+ - name: Store CI fixes in upstream artifact
+ run: |
+ mkdir -p upstream
+ git format-patch --stdout test_base > upstream/ci_fixes.patch
+ - uses: actions/upload-artifact@v3
+ with:
+ path: upstream
+ name: upstream
+
build:
runs-on: ubuntu-latest
- container: ghcr.io/sagemath/sage/sage-docker-${{ github.event.inputs.platform || 'ubuntu-focal-standard' }}-with-targets:${{ github.event.inputs.docker_tag || 'dev'}}
+ container: ghcr.io/sagemath/sage/sage-${{ github.event.inputs.platform || 'ubuntu-focal-standard' }}-with-targets:${{ github.event.inputs.docker_tag || 'dev'}}
+ needs: [get_ci_fixes]
steps:
- name: Checkout
id: checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- - name: Prepare
+ - name: Update system packages
id: prepare
run: |
- # Install test tools.
- if apt-get update && apt-get install -y git python3-venv; then
- # Debian-specific temporary code:
- # Installation of python3-venv can be removed as soon as a
- # base image with a release including #33822 is available
- :
- else
- export PATH="build/bin:$PATH"
- eval $(sage-print-system-package-command auto update)
- eval $(sage-print-system-package-command auto --spkg --yes --no-install-recommends install git)
+ export PATH="build/bin:$PATH"
+ eval $(sage-print-system-package-command auto update)
+ eval $(sage-print-system-package-command auto --spkg --yes --no-install-recommends install git)
+
+ - name: Add prebuilt tree as a worktree
+ id: worktree
+ run: |
+ set -ex
+ git config --global user.email "ci-sage@example.com"
+ git config --global user.name "Build & Test workflow"
+ git config --global --add safe.directory $(pwd)
+ # If actions/checkout downloaded our source tree using the GitHub REST API
+          # instead of with git (because git is not installed in our image),
+ # we first make the source tree a repo.
+ if [ ! -d .git ]; then git init && git add -A && git commit --quiet -m "new"; fi
+ # Tag this state of the source tree "new". This is what we want to build and test.
+ git tag -f new
+ # Our container image contains a source tree in /sage with a full build of Sage.
+ # But /sage is not a git repository.
+ # We make /sage a worktree whose index is at tag "new".
+ # We then commit the current sources and set the tag "old". (This keeps all mtimes unchanged.)
+ # Then we update worktree and index with "git reset --hard new".
+ # (This keeps mtimes of unchanged files unchanged and mtimes of changed files newer than unchanged files.)
+ # Finally we reset the index to "old". (This keeps all mtimes unchanged.)
+ # The changed files now show up as uncommitted changes.
+ # The final "git add -N" makes sure that files that were added in "new" do not show
+ # as untracked files, which would be removed by "git clean -fx".
+ git worktree add --detach worktree-image
+ rm -rf /sage/.git && mv worktree-image/.git /sage/
+ rm -rf worktree-image && ln -s /sage worktree-image
+ if [ ! -f worktree-image/.gitignore ]; then cp .gitignore worktree-image/; fi
+ (cd worktree-image && git add -A && git commit --quiet --allow-empty -m "old" -a && git tag -f old && git reset --hard new && git reset --quiet old && git add -N . && git status)
+
+ - name: Download upstream artifact
+ uses: actions/download-artifact@v3
+ with:
+ path: upstream
+ name: upstream
+
+ - name: Apply CI fixes from sagemath/sage
+ # After applying the fixes, make sure all changes are marked as uncommitted changes.
+ run: |
+ if [ -r upstream/ci_fixes.patch ]; then
+ (cd worktree-image && git commit -q -m "current changes" --allow-empty -a && git am; git reset --quiet old; git add -N .) < upstream/ci_fixes.patch
fi
- # Reuse built SAGE_LOCAL contained in the Docker image
- ./bootstrap
- ./configure --enable-build-as-root --prefix=/sage/local --with-sage-venv --enable-editable --enable-download-from-upstream-url
-
+
+ - name: Incremental build, test changed files (sage -t --new)
+ id: incremental
+ run: |
+ # Now re-bootstrap and build. The build is incremental because we were careful with the timestamps.
+ # We run tests with "sage -t --new"; this only tests the uncommitted changes.
+ ./bootstrap && make build && ./sage -t --new -p2
+ working-directory: ./worktree-image
+ env:
+ MAKE: make -j2 --output-sync=recurse
+ SAGE_NUM_THREADS: 2
+
- name: Build and test modularized distributions
- if: always() && steps.prepare.outcome == 'success'
+ if: always() && steps.worktree.outcome == 'success'
run: make V=0 tox && make pypi-wheels
+ working-directory: ./worktree-image
env:
- MAKE: make -j2
+ MAKE: make -j2 --output-sync=recurse
SAGE_NUM_THREADS: 2
- name: Set up node to install pyright
- if: always() && steps.prepare.outcome == 'success'
+ if: always() && steps.worktree.outcome == 'success'
uses: actions/setup-node@v3
with:
node-version: '12'
- name: Install pyright
- if: always() && steps.prepare.outcome == 'success'
+ if: always() && steps.worktree.outcome == 'success'
# Fix to v232 due to bug https://github.com/microsoft/pyright/issues/3239
run: npm install -g pyright@1.1.232
- name: Static code check with pyright
- if: always() && steps.prepare.outcome == 'success'
- run: pyright
-
+ if: always() && steps.worktree.outcome == 'success'
+ run: pyright
+ working-directory: ./worktree-image
+
+ - name: Clean (fallback to non-incremental)
+ id: clean
+ if: always() && steps.worktree.outcome == 'success' && steps.incremental.outcome != 'success'
+ run: |
+ set -ex
+ ./bootstrap && make doc-clean doc-uninstall sagelib-clean && git clean -fx src/sage && ./config.status
+ working-directory: ./worktree-image
+ env:
+ MAKE: make -j2
+ SAGE_NUM_THREADS: 2
+
- name: Build
+ # This step is needed because building the modularized distributions installs some optional packages,
+ # so the editable install of sagelib needs to build the corresponding optional extension modules.
id: build
- if: always() && steps.prepare.outcome == 'success'
- run: make build
+ if: always() && (steps.incremental.outcome == 'success' || steps.clean.outcome == 'success')
+ run: |
+ make build
+ working-directory: ./worktree-image
env:
- MAKE: make -j2
+ MAKE: make -j2 --output-sync=recurse
SAGE_NUM_THREADS: 2
- name: Pytest
@@ -82,17 +165,17 @@ jobs:
run: |
../sage -python -m pip install coverage pytest-xdist
../sage -python -m coverage run -m pytest -c tox.ini --doctest-modules || true
- working-directory: ./src
+ working-directory: ./worktree-image/src
env:
# Increase the length of the lines in the "short summary"
COLUMNS: 120
- - name: Test
+ - name: Test all files (sage -t --all --long)
if: always() && steps.build.outcome == 'success'
run: |
../sage -python -m pip install coverage
- ../sage -python -m coverage run ./bin/sage-runtests --all -p2
- working-directory: ./src
+ ../sage -python -m coverage run ./bin/sage-runtests --all --long -p2 --random-seed=286735480429121101562228604801325644303
+ working-directory: ./worktree-image/src
- name: Prepare coverage results
if: always() && steps.build.outcome == 'success'
@@ -100,9 +183,10 @@ jobs:
./venv/bin/python3 -m coverage combine src/.coverage/
./venv/bin/python3 -m coverage xml
find . -name *coverage*
-
+ working-directory: ./worktree-image
+
- name: Upload coverage to codecov
if: always() && steps.build.outcome == 'success'
- uses: codecov/codecov-action@v2
+ uses: codecov/codecov-action@v3
with:
- files: ./coverage.xml
+ files: ./worktree-image/coverage.xml
diff --git a/.github/workflows/ci-conda.yml b/.github/workflows/ci-conda.yml
index bc1f1c5a634..9695dc7ef9a 100644
--- a/.github/workflows/ci-conda.yml
+++ b/.github/workflows/ci-conda.yml
@@ -6,6 +6,14 @@ on:
- '*'
branches:
- 'public/build/**-runci'
+ pull_request:
+ types:
+ # Defaults
+ - opened
+ - synchronize
+ - reopened
+ # When a CI label is added
+ - labeled
workflow_dispatch:
# Allow to run manually
@@ -18,16 +26,36 @@ jobs:
test:
name: Conda
runs-on: ${{ matrix.os }}
-
+
+ # Run on push, workflow dispatch and when certain labels are added or are present
+ if: |
+ github.event_name != 'pull_request' ||
+ ((github.event.action != 'labeled' &&
+ (contains(github.event.pull_request.labels.*.name, 'c: packages: standard') ||
+ contains(github.event.pull_request.labels.*.name, 'c: packages: optional') ||
+ contains(github.event.pull_request.labels.*.name, 's: run conda ci'))) ||
+ (github.event.action == 'labeled' &&
+ (github.event.label.name == 'c: packages: optional' ||
+ github.event.label.name == 'c: packages: standard' ||
+ github.event.label.name == 's: run conda ci')))
+
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
- python: [3.8, 3.9]
- conda-env: [environment, environment-optional]
+ python: ['3.9', '3.10', '3.11']
+      # Optional environment is disabled for now as it's not yet working
+ # environment: [environment, environment-optional]
+ conda-env: [environment]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
+
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
- name: Check for Miniconda
id: check_conda
@@ -45,7 +73,7 @@ jobs:
run: ./bootstrap-conda
- name: Cache conda packages
- uses: actions/cache@v2
+ uses: actions/cache@v3
with:
path: ~/conda_pkgs_dir
key:
@@ -73,27 +101,34 @@ jobs:
run: |
./bootstrap
echo "::add-matcher::.github/workflows/configure-systempackage-problem-matcher.json"
- ./configure --enable-build-as-root --with-python=$CONDA_PREFIX/bin/python --prefix=$CONDA_PREFIX $(for pkg in $(./sage -package list :standard: --has-file spkg-configure.m4 --has-file distros/conda.txt); do echo --with-system-$pkg=force; done)
+ ./configure --enable-build-as-root --with-python=$CONDA_PREFIX/bin/python --prefix=$CONDA_PREFIX $(for pkg in $(./sage -package list :standard: --has-file spkg-configure.m4 --has-file distros/conda.txt --exclude rpy2); do echo --with-system-$pkg=force; done)
echo "::remove-matcher owner=configure-system-package-warning::"
echo "::remove-matcher owner=configure-system-package-error::"
- name: Build
shell: bash -l {0}
run: |
- pip install --no-build-isolation -v -v -e ./pkgs/sage-conf ./pkgs/sage-setup
- pip install --no-build-isolation -v -v -e ./src
+ # Use --no-deps and pip check below to verify that all necessary dependencies are installed via conda.
+ pip install --no-build-isolation --no-deps -v -v -e ./pkgs/sage-conf ./pkgs/sage-setup
+ pip install --no-build-isolation --no-deps -v -v -e ./src
env:
SAGE_NUM_THREADS: 2
+
+ - name: Verify dependencies
+ if: success() || failure()
+ shell: bash -l {0}
+ run: pip check
- name: Test
+ if: success() || failure()
shell: bash -l {0}
run: ./sage -t --all -p0
- name: Print logs
+ if: always()
run: |
for file in $(find . -type f -name "*.log"); do
echo "::group::$file"
cat "$file"
echo "::endgroup::"
done
- if: always()
diff --git a/.github/workflows/ci-cygwin-standard.yml b/.github/workflows/ci-cygwin-standard.yml
index 53c674028ef..326dd62490e 100644
--- a/.github/workflows/ci-cygwin-standard.yml
+++ b/.github/workflows/ci-cygwin-standard.yml
@@ -39,7 +39,7 @@ jobs:
with:
stage: ii-b
previous_stages: i-*
- targets: singular maxima gap pari gfan palp flintqs arb ecm givaro
+ targets: singular maxima gap pari gfan palp arb ecm givaro
needs: [cygwin-stage-i-a, cygwin-stage-i-b]
cygwin-stage-ii-c:
diff --git a/.github/workflows/ci-linux-incremental.yml b/.github/workflows/ci-linux-incremental.yml
new file mode 100644
index 00000000000..a4aa9ae99c7
--- /dev/null
+++ b/.github/workflows/ci-linux-incremental.yml
@@ -0,0 +1,132 @@
+name: CI Linux incremental
+
+## This GitHub Actions workflow runs SAGE_ROOT/tox.ini with select environments,
+## whenever a GitHub pull request is opened or synchronized in a repository
+## where GitHub Actions are enabled.
+##
+## It builds and checks some sage spkgs as defined in TARGETS.
+##
+## A job succeeds if there is no error.
+##
+## The build is run with "make V=0", so the build logs of individual packages are suppressed.
+##
+## At the end, all package build logs that contain an error are printed out.
+##
+## After all jobs have finished (or are canceled) and a short delay,
+## tar files of all logs are made available as "build artifacts".
+
+on:
+ pull_request:
+ types:
+ # Defaults
+ - opened
+ - synchronize
+ - reopened
+ # When a CI label is added
+ - labeled
+ workflow_dispatch:
+
+concurrency:
+ # Cancel previous runs of this workflow for the same branch
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+permissions:
+ packages: write
+
+jobs:
+
+ changed_files:
+ runs-on: ubuntu-latest
+ name: List changed packages
+ outputs:
+ uninstall_targets: ${{ steps.build-targets.outputs.uninstall_targets }}
+ build_targets: ${{ steps.build-targets.outputs.build_targets }}
+ steps:
+ - uses: actions/checkout@v4
+ - name: Get all packages that have changed
+ id: changed-packages
+ uses: tj-actions/changed-files@v38
+ with:
+ files_yaml: |
+ configures:
+ - 'build/pkgs/*/spkg-configure.m4'
+ pkgs:
+ - 'build/pkgs/**'
+ - 'pkgs/**'
+ - name: Determine targets to build
+ id: build-targets
+ run: |
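+          # map each changed spkg-configure.m4 to a "<pkg>-uninstall" target and
+          # each changed package file to a "<pkg>-ensure" target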
+ echo "uninstall_targets=$(echo $(for a in '' ${{ steps.changed-packages.outputs.configures_all_changed_files }}; do echo $a | sed -E 's,build/pkgs/([_.a-z0-9]*)/spkg-configure[.]m4 *,\1-uninstall,'; done | sort -u))" >> $GITHUB_OUTPUT
+ echo "build_targets=$(echo $(for a in '' ${{ steps.changed-packages.outputs.pkgs_all_changed_files }}; do echo $a | sed -E 's,-,_,g;s,(build/)?pkgs/([-_.a-z0-9]*)/[^ ]* *,\2-ensure,;'; done | sort -u))" >> $GITHUB_OUTPUT
+ cat $GITHUB_OUTPUT
+
+ test:
+ needs: [changed_files]
+ if: |
+ github.event_name != 'pull_request' ||
+ ((github.event.action != 'labeled' &&
+ (contains(github.event.pull_request.labels.*.name, 'c: packages: standard') ||
+ contains(github.event.pull_request.labels.*.name, 'c: packages: optional'))) ||
+ (github.event.action == 'labeled' &&
+ (github.event.label.name == 'c: packages: optional' ||
+ github.event.label.name == 'c: packages: standard')))
+ uses: ./.github/workflows/docker.yml
+ with:
+ # Build incrementally from published Docker image
+ incremental: true
+ free_disk_space: true
+ from_docker_repository: ghcr.io/sagemath/sage/
+ from_docker_target: "with-targets"
+ from_docker_tag: "dev"
+ docker_targets: "with-targets"
+ targets: "${{needs.changed_files.outputs.uninstall_targets}} ${{needs.changed_files.outputs.build_targets}} build doc-html ptest"
+ tox_system_factors: >-
+ ["ubuntu-focal",
+ "ubuntu-jammy",
+ "ubuntu-mantic",
+ "debian-bullseye",
+ "debian-bookworm",
+ "fedora-30",
+ "fedora-38",
+ "gentoo-python3.11",
+ "debian-bullseye-i386"]
+ tox_packages_factors: >-
+ ["standard",
+ "minimal"]
+ docker_push_repository: ghcr.io/${{ github.repository }}/
+
+ site:
+ needs: [changed_files]
+ if: |
+ github.event_name != 'pull_request' ||
+ ((github.event.action != 'labeled' &&
+ (contains(github.event.pull_request.labels.*.name, 'c: packages: standard') ||
+ contains(github.event.pull_request.labels.*.name, 'c: packages: optional'))) ||
+ (github.event.action == 'labeled' &&
+ (github.event.label.name == 'c: packages: optional' ||
+ github.event.label.name == 'c: packages: standard')))
+ uses: ./.github/workflows/docker.yml
+ with:
+ # Build incrementally from published Docker image
+ incremental: true
+ free_disk_space: true
+ from_docker_repository: ghcr.io/sagemath/sage/
+ from_docker_target: "with-targets"
+ from_docker_tag: "dev"
+ docker_targets: "with-targets"
+ targets: "${{needs.changed_files.outputs.uninstall_targets}} ${{needs.changed_files.outputs.build_targets}} build doc-html ptest"
+ # Only test systems with a usable system python (>= 3.9)
+ tox_system_factors: >-
+ ["ubuntu-jammy",
+ "ubuntu-mantic",
+ "debian-bullseye",
+ "debian-bookworm",
+ "fedora-33",
+ "fedora-38",
+ "gentoo-python3.11",
+ "archlinux",
+ "debian-bullseye-i386"]
+ tox_packages_factors: >-
+ ["standard-sitepackages"]
+ docker_push_repository: ghcr.io/${{ github.repository }}/
diff --git a/.github/workflows/ci-linux.yml b/.github/workflows/ci-linux.yml
index aca56f9b49c..665a2a4f0b0 100644
--- a/.github/workflows/ci-linux.yml
+++ b/.github/workflows/ci-linux.yml
@@ -29,6 +29,9 @@ env:
TARGETS: build doc-html
TARGETS_OPTIONAL: ptest
+permissions:
+ packages: write
+
jobs:
standard-pre:
@@ -60,6 +63,55 @@ jobs:
["standard"]
docker_push_repository: ghcr.io/${{ github.repository }}/
+ standard-sitepackages:
+ if: ${{ success() || failure() }}
+ needs: [standard-pre]
+ uses: ./.github/workflows/docker.yml
+ with:
+ # Build incrementally from previous stage (pre)
+ incremental: true
+ free_disk_space: true
+ from_docker_repository: ghcr.io/${{ github.repository }}/
+ from_docker_target: "with-targets-pre"
+ docker_targets: "with-targets with-targets-optional"
+ # FIXME: duplicated from env.TARGETS
+ targets: build doc-html
+ targets_optional: ptest
+ tox_packages_factors: >-
+ ["standard-sitepackages"]
+ # Only test systems with a usable system python (>= 3.9)
+ tox_system_factors: >-
+ ["ubuntu-jammy",
+ "ubuntu-lunar",
+ "ubuntu-mantic",
+ "debian-bullseye",
+ "debian-bookworm",
+ "debian-trixie",
+ "debian-sid",
+ "linuxmint-21",
+ "linuxmint-21.1",
+ "linuxmint-21.2",
+ "fedora-33",
+ "fedora-34",
+ "fedora-35",
+ "fedora-36",
+ "fedora-37",
+ "fedora-38",
+ "fedora-39",
+ "centos-stream-8-python3.9",
+ "centos-stream-9-python3.9",
+ "almalinux-8-python3.9",
+ "gentoo-python3.10",
+ "gentoo-python3.11",
+ "archlinux-latest",
+ "opensuse-15.4-gcc_11-python3.10",
+ "opensuse-15.5-gcc_11-python3.11",
+ "opensuse-tumbleweed-python3.10",
+ "opensuse-tumbleweed",
+ "debian-bullseye-i386"]
+ docker_push_repository: ghcr.io/${{ github.repository }}/
+ max_parallel: 10
+
minimal-pre:
if: ${{ success() || failure() }}
# It does not really "need" it.
diff --git a/.github/workflows/ci-macos.yml b/.github/workflows/ci-macos.yml
index c07c6968095..9482eb9632b 100644
--- a/.github/workflows/ci-macos.yml
+++ b/.github/workflows/ci-macos.yml
@@ -30,95 +30,46 @@ env:
TARGETS_OPTIONAL: ptest
jobs:
- local-macos:
+ stage-1:
+ uses: ./.github/workflows/macos.yml
+ with:
+ stage: "1"
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- stage: ["1", "2", "2-optional-0-o", "2-optional-p-z", "2-experimental-0-o", "2-experimental-p-z"]
- # python3_xcode is only accepted if enough packages are available from the system
- # --> to test "minimal", we would need https://github.com/sagemath/sage/issues/30949
- tox_env: [homebrew-macos-usrlocal-minimal, homebrew-macos-usrlocal-standard, homebrew-macos-usrlocal-maximal, homebrew-macos-usrlocal-python3_xcode-standard, conda-forge-macos-minimal, conda-forge-macos-standard, conda-forge-macos-maximal]
- xcode_version_factor: [default]
- os: [ macos-11, macos-12 ]
- env:
- TOX_ENV: local-${{ matrix.tox_env }}
- LOCAL_ARTIFACT_NAME: sage-local-commit-${{ github.sha }}-tox-local-${{ matrix.tox_env }}-${{ matrix.os }}-xcode_${{ matrix.xcode_version_factor }}
- LOGS_ARTIFACT_NAME: logs-commit-${{ github.sha }}-tox-local-${{ matrix.tox_env }}--${{ matrix.os }}-xcode_${{ matrix.xcode_version_factor }}
- steps:
- - uses: actions/checkout@v3
- - name: Select Xcode version
- run: |
- if [ ${{ matrix.xcode_version_factor }} != default ]; then sudo xcode-select -s /Applications/Xcode_${{ matrix.xcode_version_factor }}.app; fi
- - name: Install test prerequisites
- run: |
- brew install tox
- - uses: actions/download-artifact@v3
- with:
- path: sage-local-artifact
- name: ${{ env.LOCAL_ARTIFACT_NAME }}
- if: contains(matrix.stage, '2')
- - name: Extract sage-local artifact
- # This is macOS tar -- cannot use --listed-incremental
- run: |
- export SAGE_LOCAL=$(pwd)/.tox/$TOX_ENV/local
- .github/workflows/extract-sage-local.sh sage-local-artifact/sage-local-*.tar
- if: contains(matrix.stage, '2')
- - name: Build and test with tox
- # We use a high parallelization on purpose in order to catch possible parallelization bugs in the build scripts.
- # For doctesting, we use a lower parallelization to avoid timeouts.
- run: |
- case "${{ matrix.stage }}" in
- 1) export TARGETS_PRE="all-sage-local" TARGETS="all-sage-local" TARGETS_OPTIONAL="build/make/Makefile"
- ;;
- 2) export TARGETS_PRE="all-sage-local" TARGETS="build doc-html" TARGETS_OPTIONAL="ptest"
- ;;
- 2-optional*) export TARGETS_PRE="build/make/Makefile" TARGETS="build/make/Makefile"
- targets_pattern="${{ matrix.stage }}"
- targets_pattern="${targets_pattern#2-optional-}"
- export TARGETS_OPTIONAL=$( echo $(export PATH=build/bin:$PATH && sage-package list :optional: --has-file 'spkg-install.in|spkg-install|requirements.txt' --no-file huge|has_nonfree_dependencies | grep -v sagemath_doc | grep "^[$targets_pattern]" ) )
- ;;
- 2-experimental*) export TARGETS_PRE="build/make/Makefile" TARGETS="build/make/Makefile"
- targets_pattern="${{ matrix.stage }}"
- targets_pattern="${targets_pattern#2-experimental-}"
- export TARGETS_OPTIONAL=$( echo $(export PATH=build/bin:$PATH && sage-package list :experimental: --has-file 'spkg-install.in|spkg-install|requirements.txt' --no-file huge|has_nonfree_dependencies | grep -v sagemath_doc | grep "^[$targets_pattern]" ) )
- ;;
- esac
- MAKE="make -j12" tox -e $TOX_ENV -- SAGE_NUM_THREADS=4 $TARGETS
- - name: Prepare logs artifact
- run: |
- mkdir -p "artifacts/$LOGS_ARTIFACT_NAME"; cp -r .tox/*/log "artifacts/$LOGS_ARTIFACT_NAME"
- if: always()
- - uses: actions/upload-artifact@v3
- with:
- path: artifacts
- name: ${{ env.LOGS_ARTIFACT_NAME }}
- if: always()
- - name: Print out logs for immediate inspection
- # and markup the output with GitHub Actions logging commands
- run: |
- .github/workflows/scan-logs.sh "artifacts/$LOGS_ARTIFACT_NAME"
- if: always()
- - name: Prepare sage-local artifact
- # This also includes the copies of homebrew or conda installed in the tox environment.
- # We use absolute pathnames in the tar file.
- # This is macOS tar -- cannot use --remove-files.
- # We remove the $SAGE_LOCAL/lib64 link, which will be recreated by the next stage.
- run: |
- mkdir -p sage-local-artifact && (cd .tox/$TOX_ENV && rm -f "local/lib64" && tar -cf - $(pwd)) > sage-local-artifact/sage-${{ env.TOX_ENV }}-${{ matrix.stage }}.tar
- if: contains(matrix.stage, '1')
- - uses: actions/upload-artifact@v3
- with:
- path: sage-local-artifact/sage-${{ env.TOX_ENV }}-${{ matrix.stage }}.tar
- name: ${{ env.LOCAL_ARTIFACT_NAME }}
- if: always()
+ stage-2:
+ uses: ./.github/workflows/macos.yml
+ with:
+ stage: "2"
+ needs: [stage-1]
+
+ stage-2-optional-0-o:
+ uses: ./.github/workflows/macos.yml
+ with:
+ stage: "2-optional-0-o"
+ needs: [stage-2]
+
+ stage-2-optional-p-z:
+ uses: ./.github/workflows/macos.yml
+ with:
+ stage: "2-optional-p-z"
+ needs: [stage-2-optional-0-o]
+
+ stage-2-experimental-0-o:
+ uses: ./.github/workflows/macos.yml
+ with:
+      stage: "2-experimental-0-o"
+ needs: [stage-2-optional-p-z]
+
+ stage-2-experimental-p-z:
+ uses: ./.github/workflows/macos.yml
+ with:
+ stage: "2-experimental-p-z"
+ needs: [stage-2-experimental-0-o]
dist:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 500
- name: fetch tags
@@ -160,7 +111,7 @@ jobs:
TOX_ENV: local-${{ matrix.tox_system_factor }}-${{ matrix.tox_packages_factor }}
LOGS_ARTIFACT_NAME: logs-commit-${{ github.sha }}-tox-local-${{ matrix.tox_system_factor }}-${{ matrix.tox_packages_factor }}-xcode_${{ matrix.xcode_version_factor }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
if: "!contains(matrix.tox_system_factor, 'nobootstrap')"
- uses: actions/download-artifact@v3
with:
diff --git a/.github/workflows/ci-wsl.yml b/.github/workflows/ci-wsl.yml
index e6d4d30cfc3..ae83e1d6a0f 100644
--- a/.github/workflows/ci-wsl.yml
+++ b/.github/workflows/ci-wsl.yml
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Configure git
run: git config --global core.symlinks true
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Install Ubuntu 20.04 (in WSL)
run: |
(New-Object System.Net.WebClient).DownloadFile("https://aka.ms/wslubuntu2004", "Ubuntu.appx")
diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml
index 0d57bd8440f..26c193dfb2b 100644
--- a/.github/workflows/cygwin.yml
+++ b/.github/workflows/cygwin.yml
@@ -73,14 +73,14 @@ jobs:
choco install git python39 python39-pip --source cygwin
- name: Check out SageMath
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: ${{ inputs.sage_repo }}
ref: ${{ inputs.sage_ref }}
fetch-depth: 2000
- name: Check out git-trac-command
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: sagemath/git-trac-command
path: git-trac-command
diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml
index 96aae8fbc1a..3aaecc6a064 100644
--- a/.github/workflows/dist.yml
+++ b/.github/workflows/dist.yml
@@ -32,7 +32,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Install bootstrap prerequisites
run: |
sudo DEBIAN_FRONTEND=noninteractive apt-get update
@@ -51,7 +51,7 @@ jobs:
env:
CAN_DEPLOY: ${{ secrets.SAGEMATH_PYPI_API_TOKEN != '' }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Install bootstrap prerequisites
run: |
sudo DEBIAN_FRONTEND=noninteractive apt-get update
@@ -100,13 +100,13 @@ jobs:
#
CIBW_ARCHS: ${{ matrix.arch }}
# https://cibuildwheel.readthedocs.io/en/stable/options/#requires-python
- CIBW_PROJECT_REQUIRES_PYTHON: ">=3.8"
+ CIBW_PROJECT_REQUIRES_PYTHON: ">=3.9"
# Environment during wheel build
CIBW_ENVIRONMENT: "PATH=$(pwd)/local/bin:$PATH CPATH=$(pwd)/local/include:$CPATH LIBRARY_PATH=$(pwd)/local/lib:$LIBRARY_PATH PKG_CONFIG_PATH=$(pwd)/local/share/pkgconfig:$PKG_CONFIG_PATH ACLOCAL_PATH=/usr/share/aclocal"
# Use 'build', not 'pip wheel'
CIBW_BUILD_FRONTEND: build
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: actions/download-artifact@v3
with:
diff --git a/.github/workflows/doc-build-pdf.yml b/.github/workflows/doc-build-pdf.yml
new file mode 100644
index 00000000000..6eacba7fe52
--- /dev/null
+++ b/.github/workflows/doc-build-pdf.yml
@@ -0,0 +1,57 @@
+name: Build documentation (PDF)
+
+on:
+ pull_request:
+ push:
+ workflow_dispatch:
+ # Allow to run manually
+
+concurrency:
+ # Cancel previous runs of this workflow for the same branch
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ build-docs:
+ runs-on: ubuntu-latest
+ # Use "maximal" so that texlive is installed
+ # Use "fedora-31" for build diversity
+ container: ghcr.io/sagemath/sage/sage-docker-fedora-31-maximal-with-targets:dev
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+
+ - name: Prepare
+ run: |
+ apt-get update && apt-get install -y zip
+ # Reuse built SAGE_LOCAL contained in the Docker image
+ ./bootstrap
+ ./configure --enable-build-as-root --prefix=/sage/local --with-sage-venv --enable-download-from-upstream-url
+
+ - name: Build
+ run: make build V=0 && make doc-pdf
+ env:
+ MAKE: make -j2
+ SAGE_NUM_THREADS: 2
+ TEXMFHOME: /sage/texmf
+
+ - name: Copy docs
+ run: |
+ # For some reason the deploy step below cannot find /sage/...
+ # So copy everything from there to local folder
+ mkdir -p ./docs
+ cp -r -L /sage/local/share/doc/sage/pdf/en/* ./docs
+ # Zip everything for increased performance
+ zip -r docs-pdf.zip docs
+
+ - name: Upload docs
+ uses: actions/upload-artifact@v3
+ with:
+ name: docs-pdf
+ path: docs-pdf.zip
diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml
index 1cc9cf8cd3f..04ccb2b28fe 100644
--- a/.github/workflows/doc-build.yml
+++ b/.github/workflows/doc-build.yml
@@ -12,37 +12,141 @@ concurrency:
cancel-in-progress: true
jobs:
+ get_ci_fixes:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+ - name: Store CI fixes in upstream artifact
+ run: |
+ mkdir -p upstream
+ git format-patch --stdout test_base > upstream/ci_fixes.patch
+ - uses: actions/upload-artifact@v3
+ with:
+ path: upstream
+ name: upstream
+
build-docs:
runs-on: ubuntu-latest
- container: ghcr.io/sagemath/sage/sage-docker-ubuntu-focal-standard-with-targets:dev
+ container: ghcr.io/sagemath/sage/sage-ubuntu-focal-standard-with-targets:dev
+ needs: [get_ci_fixes]
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
+
+ - name: Update system packages
+ run: |
+ apt-get update && apt-get install -y git zip
- - name: Prepare
+ - name: Add prebuilt tree as a worktree
+ id: worktree
run: |
- apt-get update && apt-get install -y zip
- # Reuse built SAGE_LOCAL contained in the Docker image
- ./bootstrap
- ./configure --enable-build-as-root --prefix=/sage/local --with-sage-venv --enable-download-from-upstream-url
+ set -ex
+ git config --global user.email "ci-sage@example.com"
+ git config --global user.name "Build & Test workflow"
+ git config --global --add safe.directory $(pwd)
+ # If actions/checkout downloaded our source tree using the GitHub REST API
+          # instead of with git (because git is not installed in our image),
+ # we first make the source tree a repo.
+ if [ ! -d .git ]; then git init && git add -A && git commit --quiet -m "new"; fi
+ # Tag this state of the source tree "new". This is what we want to build and test.
+ git tag -f new
+ # Our container image contains a source tree in /sage with a full build of Sage.
+ # But /sage is not a git repository.
+ # We make /sage a worktree whose index is at tag "new".
+ # We then commit the current sources and set the tag "old". (This keeps all mtimes unchanged.)
+ # Then we update worktree and index with "git reset --hard new".
+ # (This keeps mtimes of unchanged files unchanged and mtimes of changed files newer than unchanged files.)
+ # Finally we reset the index to "old". (This keeps all mtimes unchanged.)
+ # The changed files now show up as uncommitted changes.
+ # The final "git add -N" makes sure that files that were added in "new" do not show
+ # as untracked files, which would be removed by "git clean -fx".
+ git worktree add --detach worktree-image
+ rm -rf /sage/.git && mv worktree-image/.git /sage/
+ rm -rf worktree-image && ln -s /sage worktree-image
+ if [ ! -f worktree-image/.gitignore ]; then cp .gitignore worktree-image/; fi
+ (cd worktree-image && git add -A && git commit --quiet --allow-empty -m "old" -a && git tag -f old && git reset --hard new && git reset --quiet old && git add -N . && git status)
+ # Keep track of changes to built HTML
+ new_version=$(cat src/VERSION.txt); (cd /sage/local/share/doc/sage/html/en && find . -name "*.html" | xargs sed -i '/class="sidebar-brand-text"/s/Sage [0-9a-z.]* /Sage '$new_version' /'; git init && (echo "*.svg binary"; echo "*.pdf binary") >> .gitattributes && (echo ".buildinfo"; echo '*.inv'; echo '.git*'; echo '*.svg'; echo '*.pdf'; echo '*.png'; echo 'searchindex.js') > .gitignore; git add -A && git commit --quiet -m "old")
- - name: Build
- run: make doc-html
+ - name: Download upstream artifact
+ uses: actions/download-artifact@v3
+ with:
+ path: upstream
+ name: upstream
+
+ - name: Apply CI fixes from sagemath/sage
+ # After applying the fixes, make sure all changes are marked as uncommitted changes.
+ run: |
+ if [ -r upstream/ci_fixes.patch ]; then
+ (cd worktree-image && git commit -q -m "current changes" --allow-empty -a && git am; git reset --quiet old; git add -N .) < upstream/ci_fixes.patch
+ fi
+
+ - name: Incremental build
+ id: incremental
+ run: |
+ # Now re-bootstrap and build. The build is incremental because we were careful with the timestamps.
+ ./bootstrap && make build
+ working-directory: ./worktree-image
+ env:
+ MAKE: make -j2 --output-sync=recurse
+ SAGE_NUM_THREADS: 2
+
+ - name: Build (fallback to non-incremental)
+ id: build
+ if: always() && steps.worktree.outcome == 'success' && steps.incremental.outcome != 'success'
+ run: |
+ set -ex
+ make doc-clean doc-uninstall sagelib-clean && git clean -fx src/sage && ./config.status && make build
+ working-directory: ./worktree-image
env:
- MAKE: make -j2
+ MAKE: make -j2 --output-sync=recurse
+ SAGE_NUM_THREADS: 2
+
+ - name: Build docs
+ id: docbuild
+ if: always() && (steps.incremental.outcome == 'success' || steps.build.outcome == 'success')
+ # Always non-incremental because of the concern that
+ # incremental docbuild may introduce broken links (inter-file references) even though the build succeeds
+ run: |
+ set -ex
+ export SAGE_USE_CDNS=yes
+ mv /sage/local/share/doc/sage/html/en/.git /sage/.git-doc
+ make doc-clean doc-uninstall sagelib-clean && git clean -fx src/sage
+ mkdir -p /sage/local/share/doc/sage/html/en/ && mv /sage/.git-doc /sage/local/share/doc/sage/html/en/.git
+ ./config.status && make doc-html
+ working-directory: ./worktree-image
+ env:
+ MAKE: make -j2 --output-sync=recurse
SAGE_NUM_THREADS: 2
- name: Copy docs
+ id: copy
+ if: always() && steps.docbuild.outcome == 'success'
run: |
+ set -ex
+ mkdir -p ./docs
+ # Create changelog
+ echo '## Preview of CHANGES.html'
+ (cd /sage/local/share/doc/sage/html/en && git diff --name-only) | tee ./docs/CHANGES.txt
+ (cd /sage/local/share/doc/sage/html/en && git diff; rm -rf .git) > ./docs/html.diff
+ echo '## Preview of html.diff'; head -n 400 ./docs/html.diff
+ (echo '<p>HTML diff</p>'; sed -E 's,(.*),<p><a href="\1">\1</a></p>,' ./docs/CHANGES.txt) > ./docs/CHANGES.html
# For some reason the deploy step below cannot find /sage/...
# So copy everything from there to local folder
# We also need to replace the symlinks because netlify is not following them
- mkdir -p ./docs
cp -r -L /sage/local/share/doc/sage/html/en/* ./docs
# Zip everything for increased performance
zip -r docs.zip docs
- name: Upload docs
+ if: always() && steps.copy.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: docs
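
The worktree step in `doc-build.yml` above is the heart of the incremental build. The following standalone sketch distills the same tag/reset trick; `/prebuilt` is a hypothetical stand-in for the container's prebuilt `/sage` tree.

```bash
# Sketch of the mtime-preserving trick from the "Add prebuilt tree as a worktree" step;
# /prebuilt is a hypothetical path standing in for the image's /sage.
git tag -f new                               # the sources we actually want to build
git worktree add --detach worktree-image
rm -rf /prebuilt/.git && mv worktree-image/.git /prebuilt/
rm -rf worktree-image && ln -s /prebuilt worktree-image
cd worktree-image
git add -A && git commit --quiet --allow-empty -m "old" && git tag -f old   # mtimes untouched
git reset --hard new                         # changed files now carry newer mtimes
git reset --quiet old                        # index back at "old"; changes show as uncommitted
git add -N .                                 # files added in "new" survive a later `git clean -fx`
```
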
diff --git a/.github/workflows/doc-publish.yml b/.github/workflows/doc-publish.yml
index c7be4a46d3b..14337131420 100644
--- a/.github/workflows/doc-publish.yml
+++ b/.github/workflows/doc-publish.yml
@@ -1,4 +1,4 @@
-# Triggers after the documentation build has finished,
+# Triggers after the documentation build has finished,
# taking the artifact and uploading it to netlify
name: Publish documentation
@@ -28,10 +28,10 @@ jobs:
# Once https://github.com/actions/download-artifact/issues/172 and/or https://github.com/actions/download-artifact/issues/60 is implemented, we can use the official download-artifact action
# For now use the solution from https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#using-data-from-the-triggering-workflow
- name: Download docs
- uses: actions/github-script@v3.1.0
+ uses: actions/github-script@v6.4.0
with:
script: |
- var artifacts = await github.actions.listWorkflowRunArtifacts({
+ var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
@@ -39,7 +39,7 @@ jobs:
var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "docs"
})[0];
- var download = await github.actions.downloadArtifact({
+ var download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
@@ -47,7 +47,7 @@ jobs:
});
var fs = require('fs');
fs.writeFileSync('${{github.workspace}}/docs.zip', Buffer.from(download.data));
-
+
- name: Extract docs
run: unzip docs.zip -d docs && unzip docs/docs.zip -d docs/docs
@@ -72,11 +72,10 @@ jobs:
header: preview-comment
recreate: true
message: |
- [Documentation preview for this PR](${{ steps.deploy-netlify.outputs.NETLIFY_URL }}) is ready! :tada:
- Built with commit: ${{ steps.source-run-info.outputs.sourceHeadSha }}
-
+ [Documentation preview for this PR](${{ steps.deploy-netlify.outputs.NETLIFY_URL }}) (built with commit ${{ steps.source-run-info.outputs.sourceHeadSha }}; [changes](${{ steps.deploy-netlify.outputs.NETLIFY_URL }}/CHANGES.html)) is ready! :tada:
+
- name: Update deployment status PR check
- uses: myrotvorets/set-commit-status-action@1.1.6
+ uses: myrotvorets/set-commit-status-action@v2.0.0
if: ${{ always() }}
env:
DEPLOY_SUCCESS: Successfully deployed preview.
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 04c74659c1d..db4896bbd12 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -18,24 +18,23 @@ on:
default: >-
["ubuntu-trusty-toolchain-gcc_9",
"ubuntu-xenial-toolchain-gcc_9",
- "ubuntu-bionic-gcc_8-python3.8",
+ "ubuntu-bionic-gcc_8",
"ubuntu-focal",
"ubuntu-jammy",
- "ubuntu-kinetic",
"ubuntu-lunar",
- "debian-buster",
+ "ubuntu-mantic",
+ "debian-buster-gcc_spkg",
"debian-bullseye",
"debian-bookworm",
+ "debian-trixie",
"debian-sid",
- "linuxmint-19-gcc_8-python3.8",
- "linuxmint-19.3-gcc_8-python3.8",
"linuxmint-20.1",
"linuxmint-20.2",
"linuxmint-20.3",
"linuxmint-21",
"linuxmint-21.1",
- "fedora-29-python3.8",
- "fedora-30-python3.8",
+ "linuxmint-21.2",
+ "fedora-30",
"fedora-31",
"fedora-32",
"fedora-33",
@@ -44,19 +43,23 @@ on:
"fedora-36",
"fedora-37",
"fedora-38",
+ "fedora-39",
"centos-7-devtoolset-gcc_11",
"centos-stream-8-python3.9",
"centos-stream-9-python3.9",
- "gentoo-python3.9",
+ "almalinux-8-python3.9",
+ "almalinux-9-python3.11",
"gentoo-python3.10",
"gentoo-python3.11",
"archlinux-latest",
"opensuse-15.3-gcc_11-python3.9",
"opensuse-15.4-gcc_11-python3.10",
+ "opensuse-15.5-gcc_11-python3.11",
"opensuse-tumbleweed-python3.10",
+ "opensuse-tumbleweed",
"conda-forge",
"ubuntu-bionic-gcc_8-i386",
- "debian-buster-i386",
+ "debian-bullseye-i386",
]
tox_packages_factors:
description: 'Stringified JSON object listing tox packages factors'
@@ -71,7 +74,7 @@ on:
default: ""
max_parallel:
type: number
- default: 24
+ default: 30
free_disk_space:
default: false
type: boolean
@@ -101,7 +104,7 @@ on:
default: "$BUILD_TAG"
type: string
#
- # For use in upstream CIs
+ # For use in upstream CIs. sage_trac_* are now ignored and will be removed later.
#
upstream_artifact:
required: false
@@ -142,13 +145,11 @@ jobs:
EXTRA_SAGE_PACKAGES: ${{ inputs.extra_sage_packages }}
steps:
- name: Check out SageMath
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: ${{ inputs.sage_repo }}
ref: ${{ inputs.sage_ref }}
fetch-depth: 10000
- - name: fetch tags
- run: git fetch --depth=1 origin +refs/tags/*:refs/tags/*
- name: free disk space
run: |
df -h
@@ -161,21 +162,6 @@ jobs:
sudo apt-get --fix-broken --yes remove $(dpkg-query -f '${Package}\n' -W | grep -E '^(ghc-|google-cloud-sdk|google-chrome|firefox|mysql-server|dotnet-sdk|hhvm|mono)') || echo "(error ignored)"
df -h
if: inputs.free_disk_space
- - name: Check out git-trac-command
- uses: actions/checkout@v3
- with:
- repository: sagemath/git-trac-command
- path: git-trac-command
- if: inputs.sage_trac_git != ''
- - name: Check out SageMath from trac.sagemath.org
- shell: bash {0}
- # Random sleep and retry to limit the load on trac.sagemath.org
- run: |
- git config --global user.email "ci-sage@example.com"
- git config --global user.name "ci-sage workflow"
- if [ ! -d .git ]; then git init; fi; git remote add trac ${{ inputs.sage_trac_git }} && x=1 && while [ $x -le 5 ]; do x=$(( $x + 1 )); sleep $(( $RANDOM % 60 + 1 )); if git-trac-command/git-trac fetch ${{ inputs.sage_trac_ticket }}; then git merge FETCH_HEAD || echo "(ignored)"; exit 0; fi; sleep 40; done; exit 1
- if: inputs.sage_trac_git != ''
-
- name: Download upstream artifact
uses: actions/download-artifact@v3
with:
@@ -193,6 +179,12 @@ jobs:
(export PATH=$(pwd)/build/bin:$PATH; (cd upstream && bash -x update-pkgs.sh) && sed -i.bak '/upstream/d' .dockerignore && echo "/:toolchain:/i ADD upstream upstream" | sed -i.bak -f - build/bin/write-dockerfile.sh && git diff)
if: inputs.upstream_artifact
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+
- name: Try to login to ghcr.io
if: inputs.docker_push_repository != ''
# https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 883749e2bf7..b4eebb4ac9e 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -13,11 +13,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
- name: Set up Python
uses: actions/setup-python@v4
with:
- python-version: 3.8
+ python-version: 3.9
- name: Install pycodestyle
run: pip install tox pycodestyle
- name: Lint using pycodestyle
@@ -27,11 +32,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
- name: Set up Python
uses: actions/setup-python@v4
with:
- python-version: 3.8
+ python-version: 3.9
- name: Install relint
run: pip install tox relint
- name: Lint using relint
@@ -41,11 +51,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
- name: Set up Python
uses: actions/setup-python@v4
with:
- python-version: 3.8
+ python-version: 3.9
- name: Install tox
run: pip install tox
- name: Lint using tox -e rst
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
new file mode 100644
index 00000000000..5b448cec1bb
--- /dev/null
+++ b/.github/workflows/macos.yml
@@ -0,0 +1,159 @@
+name: Reusable workflow for macOS portability CI
+
+on:
+ workflow_call:
+ inputs:
+ # Either specify a stage
+ stage:
+ required: false
+ type: string
+ # Or specify targets
+ targets_pre:
+ default: build/make/Makefile
+ type: string
+ targets:
+ default: build/make/Makefile
+ type: string
+ targets_optional:
+ default: build/make/Makefile
+ type: string
+ # System configuration
+ osversion_xcodeversion_toxenv_tuples:
+ description: 'Stringified JSON object'
+ default: >-
+ [["latest", "", "homebrew-macos-usrlocal-minimal"],
+ ["latest", "", "homebrew-macos-usrlocal-standard"],
+ ["11", "xcode_11.7", "homebrew-macos-usrlocal-standard"],
+ ["12", "", "homebrew-macos-usrlocal-standard"],
+ ["13", "xcode_15.0", "homebrew-macos-usrlocal-standard"],
+ ["latest", "", "homebrew-macos-usrlocal-maximal"],
+ ["latest", "", "homebrew-macos-usrlocal-python3_xcode-standard"],
+ ["latest", "", "conda-forge-macos-minimal"],
+ ["latest", "", "conda-forge-macos-standard"],
+ ["latest", "", "conda-forge-macos-maximal"]]
+ type: string
+ extra_sage_packages:
+ description: 'Extra Sage packages to install as system packages'
+ type: string
+ default: ""
+ max_parallel:
+ type: number
+ default: 10
+ free_disk_space:
+ default: false
+ type: boolean
+ #
+ # For use in upstream CIs.
+ #
+ upstream_artifact:
+ required: false
+ type: string
+ sage_repo:
+ required: false
+ type: string
+ sage_ref:
+ required: false
+ type: string
+
+jobs:
+ local-macos:
+
+ runs-on: macos-${{ matrix.osversion_xcodeversion_toxenv[0] }}
+ strategy:
+ fail-fast: false
+ max-parallel: ${{ inputs.max_parallel }}
+ matrix:
+ osversion_xcodeversion_toxenv: ${{ fromJson(inputs.osversion_xcodeversion_toxenv_tuples) }}
+ env:
+ TOX_ENV: local-${{ matrix.osversion_xcodeversion_toxenv[2] }}${{ matrix.osversion_xcodeversion_toxenv[1] && format('-{0}', matrix.osversion_xcodeversion_toxenv[1]) }}
+ LOCAL_ARTIFACT_NAME: sage-local-commit-${{ github.sha }}-tox-local-${{ matrix.osversion_xcodeversion_toxenv[2] }}-macos-${{ matrix.osversion_xcodeversion_toxenv[0] }}${{ matrix.osversion_xcodeversion_toxenv[1] && format('-{0}', matrix.osversion_xcodeversion_toxenv[1]) }}
+ LOGS_ARTIFACT_NAME: logs-commit-${{ github.sha }}-tox-local-${{ matrix.osversion_xcodeversion_toxenv[2] }}-macos-${{ matrix.osversion_xcodeversion_toxenv[0] }}${{ matrix.osversion_xcodeversion_toxenv[1] && format('-{0}', matrix.osversion_xcodeversion_toxenv[1]) }}
+ steps:
+ - name: Check out SageMath
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ inputs.sage_repo }}
+ ref: ${{ inputs.sage_ref }}
+ fetch-depth: 10000
+
+ - name: Install test prerequisites
+ run: |
+ brew install tox
+ - name: Download upstream artifact
+ uses: actions/download-artifact@v3
+ with:
+ path: upstream
+ name: ${{ inputs.upstream_artifact }}
+ if: inputs.upstream_artifact
+ - name: Update Sage packages from upstream artifact
+ run: |
+ (export PATH=$(pwd)/build/bin:$PATH; (cd upstream && bash -x update-pkgs.sh) && git diff)
+ if: inputs.upstream_artifact
+
+ - name: Merge CI fixes from sagemath/sage
+ run: |
+ .ci/merge-fixes.sh
+ env:
+ GH_TOKEN: ${{ github.token }}
+
+ - uses: actions/download-artifact@v3
+ with:
+ path: sage-local-artifact
+ name: ${{ env.LOCAL_ARTIFACT_NAME }}
+ if: contains(inputs.stage, '2')
+ - name: Extract sage-local artifact
+ # This is macOS tar -- cannot use --listed-incremental
+ run: |
+ export SAGE_LOCAL=$(pwd)/.tox/$TOX_ENV/local
+ .github/workflows/extract-sage-local.sh sage-local-artifact/sage-local-*.tar
+ if: contains(inputs.stage, '2')
+ - name: Build and test with tox
+ # We use a high parallelization on purpose in order to catch possible parallelization bugs in the build scripts.
+ # For doctesting, we use a lower parallelization to avoid timeouts.
+ run: |
+ case "${{ inputs.stage }}" in
+ 1) export TARGETS_PRE="all-sage-local" TARGETS="all-sage-local" TARGETS_OPTIONAL="build/make/Makefile"
+ ;;
+ 2) export TARGETS_PRE="all-sage-local" TARGETS="build doc-html" TARGETS_OPTIONAL="ptest"
+ ;;
+ 2-optional*) export TARGETS_PRE="build/make/Makefile" TARGETS="build/make/Makefile"
+ targets_pattern="${{ inputs.stage }}"
+ targets_pattern="${targets_pattern#2-optional-}"
+ export TARGETS_OPTIONAL=$( echo $(export PATH=build/bin:$PATH && sage-package list :optional: --has-file 'spkg-install.in|spkg-install|requirements.txt' --no-file huge|has_nonfree_dependencies | grep -v sagemath_doc | grep "^[$targets_pattern]" ) )
+ ;;
+ 2-experimental*) export TARGETS_PRE="build/make/Makefile" TARGETS="build/make/Makefile"
+ targets_pattern="${{ inputs.stage }}"
+ targets_pattern="${targets_pattern#2-experimental-}"
+ export TARGETS_OPTIONAL=$( echo $(export PATH=build/bin:$PATH && sage-package list :experimental: --has-file 'spkg-install.in|spkg-install|requirements.txt' --no-file huge|has_nonfree_dependencies | grep -v sagemath_doc | grep "^[$targets_pattern]" ) )
+ ;;
+ *) export TARGETS_PRE="${{ inputs.targets_pre }}" TARGETS="${{ inputs.targets }}" TARGETS_OPTIONAL="${{ inputs.targets_optional }}"
+ ;;
+ esac
+ MAKE="make -j12" tox -e $TOX_ENV -- SAGE_NUM_THREADS=4 $TARGETS
+ - name: Prepare logs artifact
+ run: |
+ mkdir -p "artifacts/$LOGS_ARTIFACT_NAME"; cp -r .tox/*/log "artifacts/$LOGS_ARTIFACT_NAME"
+ if: always()
+ - uses: actions/upload-artifact@v3
+ with:
+ path: artifacts
+ name: ${{ env.LOGS_ARTIFACT_NAME }}
+ if: always()
+ - name: Print out logs for immediate inspection
+ # and markup the output with GitHub Actions logging commands
+ run: |
+ .github/workflows/scan-logs.sh "artifacts/$LOGS_ARTIFACT_NAME"
+ if: always()
+ - name: Prepare sage-local artifact
+ # This also includes the copies of homebrew or conda installed in the tox environment.
+ # We use absolute pathnames in the tar file.
+ # This is macOS tar -- cannot use --remove-files.
+ # We remove the $SAGE_LOCAL/lib64 link, which will be recreated by the next stage.
+ run: |
+ mkdir -p sage-local-artifact && (cd .tox/$TOX_ENV && rm -f "local/lib64" && tar -cf - $(pwd)) > sage-local-artifact/sage-${{ env.TOX_ENV }}-${{ inputs.stage }}.tar
+ if: contains(inputs.stage, '1')
+ - uses: actions/upload-artifact@v3
+ with:
+ path: sage-local-artifact/sage-${{ env.TOX_ENV }}-${{ inputs.stage }}.tar
+ name: ${{ env.LOCAL_ARTIFACT_NAME }}
+ if: always()
diff --git a/.github/workflows/push_to_docker_hub.yml b/.github/workflows/push_to_docker_hub.yml
new file mode 100644
index 00000000000..eb36f3c979a
--- /dev/null
+++ b/.github/workflows/push_to_docker_hub.yml
@@ -0,0 +1,126 @@
+name: Build Docker images and push to DockerHub
+
+on:
+ workflow_dispatch:
+ # Allow to run manually
+ branches:
+ - 'develop'
+ - 'docker_hub_gha'
+ push:
+ tags:
+ # Just create image on pushing a tag
+ - '*'
+
+jobs:
+ sagemath-dev:
+ name: Build Docker image on target make-build and push to DockerHub sagemath-dev
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set tag
+ # docker/metadata-action@v4 is not used since we need to distinguish
+ # between latest and develop tags
+ id: set_tag
+ run: |
+ git fetch --depth=1 origin +refs/tags/*:refs/tags/*
+ TAG_NAME=$(git tag --sort=v:refname | tail -1)
+ TAG="sagemath/sagemath-dev:$TAG_NAME"
+ TAG_LIST="$TAG, sagemath/sagemath-dev:develop"
+ TAG_LIST="$TAG" # don't tag develop until meaning of sagemath-dev is clear
+ echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV
+ echo "TAG=$TAG" >> $GITHUB_ENV
+ echo "TAG_LIST=$TAG_LIST" >> $GITHUB_ENV
+
+ - name: Update Tag List
+ id: upd_tag_list
+ run: |
+ TAG_LIST="${{ env.TAG_LIST }}, sagemath/sagemath-dev:latest"
+ TAG_LIST="${{ env.TAG_LIST }}" # don't tag latest until meaning of sagemath-dev is clear
+ echo "TAG_LIST=$TAG_LIST" >> $GITHUB_ENV
+ if: "!contains(env.TAG_NAME, 'beta') && !contains(env.TAG_NAME, 'rc')"
+
+ - name: Check env
+ run: |
+ echo ${{ env.TAG_NAME }}
+ echo ${{ env.TAG }}
+ echo ${{ env.TAG_LIST }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Build and push make-build
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: docker/Dockerfile
+ target: make-build # see the corresponding header-note
+ push: true
+ tags: ${{ env.TAG_LIST }}
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
+
+ sagemath:
+ needs: sagemath-dev
+ name: Build Docker image on target sagemath and push to DockerHub sagemath
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set tag
+ # docker/metadata-action@v4 is not used since we need to distinguish
+ # between latest and develop tags
+ id: set_tag
+ run: |
+ git fetch --depth=1 origin +refs/tags/*:refs/tags/*
+ TAG_NAME=$(git tag --sort=v:refname | tail -1)
+ TAG="sagemath/sagemath:$TAG_NAME"
+ TAG_LIST="$TAG, sagemath/sagemath:develop"
+ BASE="sagemath/sagemath-dev:$TAG_NAME"
+ echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV
+ echo "TAG=$TAG" >> $GITHUB_ENV
+ echo "TAG_LIST=$TAG_LIST" >> $GITHUB_ENV
+ echo "BASE=$BASE" >> $GITHUB_ENV
+
+ - name: Update Tag List
+ id: upd_tag_list
+ run: |
+ TAG_LIST="${{ env.TAG_LIST }}, sagemath/sagemath:latest"
+ echo "TAG_LIST=$TAG_LIST" >> $GITHUB_ENV
+ if: "!contains(env.TAG_NAME, 'beta') && !contains(env.TAG_NAME, 'rc')"
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Build and push sagemath
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: docker/Dockerfile
+ build-args: |
+ MAKE_BUILD=${{ env.BASE }}
+ target: sagemath
+ push: true
+ tags: ${{ env.TAG_LIST }}
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
diff --git a/.github/workflows/sync_labels.yml b/.github/workflows/sync_labels.yml
new file mode 100644
index 00000000000..f9378d1fe9d
--- /dev/null
+++ b/.github/workflows/sync_labels.yml
@@ -0,0 +1,57 @@
+# This workflow synchronizes groups of labels that correspond
+# to items of a selection list in Trac. It ensures that in each
+# such case only one label of the group is present.
+# Furthermore, for the state labels it checks that they
+# coincide with the corresponding review state.
+
+name: Synchronize labels
+
+on:
+ issues:
+ types: [opened, reopened, closed, labeled, unlabeled]
+ pull_request_review:
+ types: [submitted]
+ pull_request_target:
+ types: [opened, reopened, closed, ready_for_review, converted_to_draft, synchronize, labeled, unlabeled]
+ schedule:
+ # run cleaning of warning comments twice a day
+ - cron: '00 6,18 * * *'
+
+jobs:
+ synchronize:
+ if: | # check variables from repository settings to suspend the job
+ vars.SYNC_LABELS_ACTIVE == 'yes' && (! vars.SYNC_LABELS_IGNORE_EVENTS || ! contains(fromJSON(vars.SYNC_LABELS_IGNORE_EVENTS), github.event.action))
+ runs-on: ubuntu-latest
+ steps:
+ # Checkout the Python script
+ - name: Checkout files
+ uses: Bhacaz/checkout-files@v2
+ with:
+ files: .github/sync_labels.py
+
+ # Perform synchronization
+ - name: Call script for synchronization
+ if: github.event.schedule == ''
+ run: |
+ chmod a+x .github/sync_labels.py
+ .github/sync_labels.py $ACTION $ISSUE_URL $PR_URL $ACTOR "$LABEL" "$REV_STATE" $LOG_LEVEL
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ ACTION: ${{ github.event.action }}
+ ISSUE_URL: ${{ github.event.issue.html_url }}
+ PR_URL: ${{ github.event.pull_request.html_url }}
+ ACTOR: ${{ github.actor }}
+ LABEL: ${{ github.event.label.name }}
+ REV_STATE: ${{ github.event.review.state }}
+ LOG_LEVEL: ${{ vars.SYNC_LABELS_LOG_LEVEL }} # variable from repository settings, values can be "--debug", "--info" or "--warning"
+
+ # Perform cleaning
+ - name: Call script for cleaning
+ if: github.event.schedule != ''
+ run: |
+ chmod a+x .github/sync_labels.py
+ .github/sync_labels.py $REPO_URL $LOG_LEVEL
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ REPO_URL: ${{ github.event.repository.html_url }}
+ LOG_LEVEL: ${{ vars.SYNC_LABELS_LOG_LEVEL }} # variable from repository settings, values can be "--debug", "--info" or "--warning"
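
For reference, the scheduled cleaning pass boils down to a plain invocation of the script. A hypothetical local equivalent (argument order taken from the step above; the token value is a placeholder) would be:

```bash
# Hypothetical local run of the cleaning pass; requires the `gh` CLI on PATH.
export GITHUB_TOKEN=...                      # placeholder, not a real token
chmod a+x .github/sync_labels.py
.github/sync_labels.py https://github.com/sagemath/sage --info
```
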
diff --git a/.gitignore b/.gitignore
index 2faf325a44c..efdbfa8f616 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,8 +19,10 @@
/config.status
/configure
/conftest*
+/confdefs.h
/m4/sage_spkg_configures.m4
+/m4/sage_spkg_versions*.m4
# no longer generated, but may still be in user worktrees
/src/lib/pkgconfig
@@ -174,14 +176,32 @@ build/bin/sage-build-env-config
/pkgs/*/.tox
/pkgs/sagemath-objects/setup.cfg
+/pkgs/sagemath-bliss/setup.cfg
+/pkgs/sagemath-coxeter3/setup.cfg
+/pkgs/sagemath-mcqd/setup.cfg
+/pkgs/sagemath-meataxe/setup.cfg
+/pkgs/sagemath-sirocco/setup.cfg
+/pkgs/sagemath-tdlib/setup.cfg
/pkgs/sagemath-categories/setup.cfg
/pkgs/sagemath-environment/setup.cfg
/pkgs/sagemath-repl/setup.cfg
/pkgs/sagemath-objects/pyproject.toml
+/pkgs/sagemath-bliss/pyproject.toml
+/pkgs/sagemath-coxeter3/pyproject.toml
+/pkgs/sagemath-mcqd/pyproject.toml
+/pkgs/sagemath-meataxe/pyproject.toml
+/pkgs/sagemath-sirocco/pyproject.toml
+/pkgs/sagemath-tdlib/pyproject.toml
/pkgs/sagemath-categories/pyproject.toml
/pkgs/sagemath-environment/pyproject.toml
/pkgs/sagemath-repl/pyproject.toml
/pkgs/sagemath-objects/requirements.txt
+/pkgs/sagemath-bliss/requirements.txt
+/pkgs/sagemath-coxeter3/requirements.txt
+/pkgs/sagemath-mcqd/requirements.txt
+/pkgs/sagemath-meataxe/requirements.txt
+/pkgs/sagemath-sirocco/requirements.txt
+/pkgs/sagemath-tdlib/requirements.txt
/pkgs/sagemath-categories/requirements.txt
/pkgs/sagemath-environment/requirements.txt
/pkgs/sagemath-repl/requirements.txt
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 58c9bc7af2b..b3079a7c4ee 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -27,7 +27,7 @@
"python.linting.enabled": true,
// The following pycodestyle arguments are the same as the pycodestyle-minimal
// tox environment, see the file SAGE_ROOT/src/tox.ini
- "python.linting.pycodestyleArgs": ["--select=E111,E306,E401,E701,E702,E703,W291,W391,W605,E711,E712,E713,E721,E722"],
+ "python.linting.pycodestyleArgs": ["--select= E111,E21,E222,E227,E25,E271,E303,E305,E306,E401,E502,E701,E702,E703,E71,E72,W291,W293,W391,W605"],
"cSpell.words": [
"furo",
"Conda",
diff --git a/.zenodo.json b/.zenodo.json
deleted file mode 100644
index 45c935970a7..00000000000
--- a/.zenodo.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "description": "Mirror of the Sage https://sagemath.org/ source tree",
- "license": "other-open",
- "title": "sagemath/sage: 10.0.beta1",
- "version": "10.0.beta1",
- "upload_type": "software",
- "publication_date": "2023-02-19",
- "creators": [
- {
- "affiliation": "SageMath.org",
- "name": "The SageMath Developers"
- }
- ],
- "access_right": "open",
- "related_identifiers": [
- {
- "scheme": "url",
- "identifier": "https://github.com/sagemath/sage/tree/10.0.beta1",
- "relation": "isSupplementTo"
- },
- {
- "scheme": "doi",
- "identifier": "10.5281/zenodo.593563",
- "relation": "isNewVersionOf"
- }
- ]
-}
diff --git a/.zenodo.json.in b/.zenodo.json.in
deleted file mode 100644
index 744e020705a..00000000000
--- a/.zenodo.json.in
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "description": "Mirror of the Sage https://sagemath.org/ source tree",
- "license": "other-open",
- "title": "sagemath/sage: ${SAGE_VERSION}",
- "version": "${SAGE_VERSION}",
- "upload_type": "software",
- "publication_date": "${SAGE_RELEASE_DATE}",
- "creators": [
- {
- "affiliation": "SageMath.org",
- "name": "The SageMath Developers"
- }
- ],
- "access_right": "open",
- "related_identifiers": [
- {
- "scheme": "url",
- "identifier": "https://github.com/sagemath/sage/tree/${SAGE_VERSION}",
- "relation": "isSupplementTo"
- },
- {
- "scheme": "doi",
- "identifier": "10.5281/zenodo.593563",
- "relation": "isNewVersionOf"
- }
- ]
-}
diff --git a/CITATION.cff b/CITATION.cff
index d97e13cb2cb..7c31fd13f05 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -4,8 +4,8 @@ title: SageMath
abstract: SageMath is a free open-source mathematics software system.
authors:
- name: "The SageMath Developers"
-version: 9.5
+version: 10.2.beta5
doi: 10.5281/zenodo.593563
-date-released: 2022-01-18
+date-released: 2023-09-27
repository-code: "https://github.com/sagemath/sage"
url: "https://www.sagemath.org/"
diff --git a/CITATION.cff.in b/CITATION.cff.in
new file mode 100644
index 00000000000..bf7d5d3e58c
--- /dev/null
+++ b/CITATION.cff.in
@@ -0,0 +1,11 @@
+cff-version: 1.2.0
+message: "If you use this software, please cite it as below."
+title: SageMath
+abstract: SageMath is a free open-source mathematics software system.
+authors:
+- name: "The SageMath Developers"
+version: ${SAGE_VERSION}
+doi: 10.5281/zenodo.593563
+date-released: ${SAGE_RELEASE_DATE}
+repository-code: "https://github.com/sagemath/sage"
+url: "https://www.sagemath.org/"
diff --git a/COPYING.txt b/COPYING.txt
index a785477fb4f..d5c28eacead 100644
--- a/COPYING.txt
+++ b/COPYING.txt
@@ -51,7 +51,6 @@ elliptic_curves None (database)
extcode GPLv2+
fflas_ffpack LGPLv2.1+
flint GPLv2+
-flintqs GPLv2+
fplll LGPLv2.1+
freetype FreeType License (similar to BSD; see below)
gap GPLv2+
diff --git a/Makefile b/Makefile
index 008d6ff2221..11a7b77665a 100644
--- a/Makefile
+++ b/Makefile
@@ -80,42 +80,6 @@ download:
dist: build/make/Makefile
./sage --sdist
-pypi-sdists: sage_setup
- ./sage --sh build/pkgs/sage_conf/spkg-src
- ./sage --sh build/pkgs/sage_sws2rst/spkg-src
- ./sage --sh build/pkgs/sage_docbuild/spkg-src
- ./sage --sh build/pkgs/sage_setup/spkg-src
- ./sage --sh build/pkgs/sagelib/spkg-src
- ./sage --sh build/pkgs/sagemath_objects/spkg-src
- ./sage --sh build/pkgs/sagemath_categories/spkg-src
- ./sage --sh build/pkgs/sagemath_environment/spkg-src
- ./sage --sh build/pkgs/sagemath_repl/spkg-src
- @echo "Built sdists are in upstream/"
-
-# Ensuring wheels are present, even for packages that may have been installed
-# as editable. Until we have better uninstallation of script packages, we
-# just remove the timestamps, which will lead to rebuilds of the packages.
-PYPI_WHEEL_PACKAGES = sage_sws2rst sage_setup sagemath_environment sagemath_objects sagemath_repl sagemath_categories
-pypi-wheels:
- for a in $(PYPI_WHEEL_PACKAGES); do \
- rm -f venv/var/lib/sage/installed/$$a-*; \
- done
- for a in $(PYPI_WHEEL_PACKAGES); do \
- $(MAKE) SAGE_EDITABLE=no SAGE_WHEELS=yes $$a; \
- done
- @echo "Built wheels are in venv/var/lib/sage/wheels/"
-
-# sage_docbuild is here, not in PYPI_WHEEL_PACKAGES, because it depends on sagelib
-WHEEL_PACKAGES = $(PYPI_WHEEL_PACKAGES) sage_conf sagelib sage_docbuild
-wheels:
- for a in $(WHEEL_PACKAGES); do \
- rm -f venv/var/lib/sage/installed/$$a-*; \
- done
- for a in $(WHEEL_PACKAGES); do \
- $(MAKE) SAGE_EDITABLE=no SAGE_WHEELS=yes $$a; \
- done
- @echo "Built wheels are in venv/var/lib/sage/wheels/"
-
###############################################################################
# Cleaning up
###############################################################################
@@ -380,7 +344,6 @@ list:
@$(MAKE) --silent -f build/make/Makefile SAGE_PKGCONFIG=dummy $@
.PHONY: default build dist install micro_release \
- pypi-sdists pypi-wheels wheels \
misc-clean bdist-clean distclean bootstrap-clean maintainer-clean \
test check testoptional testall testlong testoptionallong testallong \
ptest ptestoptional ptestall ptestlong ptestoptionallong ptestallong \
diff --git a/README.md b/README.md
index 1233f17de73..aee0c3a0fbc 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,16 @@
-
+
+
+
+
+
+
# Sage: Open Source Mathematical Software
> "Creating a Viable Open Source Alternative to
> Magma, Maple, Mathematica, and MATLAB"
-> Copyright (C) 2005-2022 The Sage Development Team
+> Copyright (C) 2005-2023 The Sage Development Team
https://www.sagemath.org
@@ -53,12 +58,14 @@ mailing list](https://groups.google.com/group/sage-devel).
--------------------------------
The preferred way to run Sage on Windows is using the [Windows Subsystem for
-Linux](https://docs.microsoft.com/en-us/windows/wsl/faq), which allows
+Linux](https://docs.microsoft.com/en-us/windows/wsl/faq), a.k.a. WSL, which allows
you to install a standard Linux distribution such as Ubuntu within
-your Windows. Then all instructions for installation in Linux apply.
+your Windows. Make sure you allocate WSL sufficient RAM; 5GB is known to work, while
+2GB might not be enough for building Sage from source.
+Then all instructions for installation in Linux apply.
As an alternative, you can also run Linux on Windows using Docker (see
-above) or other virtualization solutions.
+below) or other virtualization solutions.
[macOS] Preparing the Platform
------------------------------
@@ -182,8 +189,8 @@ in the Installation Guide.
3. [Linux, WSL] Install the required minimal build prerequisites.
- - Compilers: `gcc`, `gfortran`, `g++` (GCC 8.x to 12.x and recent
- versions of Clang (LLVM) are supported).
+ - Compilers: `gcc`, `gfortran`, `g++` (GCC versions from 8.4.0 to 13.x
+ and recent versions of Clang (LLVM) are supported).
See [build/pkgs/gcc/SPKG.rst](build/pkgs/gcc/SPKG.rst) and
[build/pkgs/gfortran/SPKG.rst](build/pkgs/gfortran/SPKG.rst)
for a discussion of suitable compilers.
@@ -194,7 +201,7 @@ in the Installation Guide.
more details.
- Python 3.4 or later, or Python 2.7, a full installation including
- `urllib`; but ideally version 3.8.x, 3.9.x, or 3.10.x, which
+ `urllib`; but ideally version 3.9.x, 3.10.x, or 3.11.x, which
will avoid having to build Sage's own copy of Python 3.
See [build/pkgs/python3/SPKG.rst](build/pkgs/python3/SPKG.rst)
for more details.
@@ -394,6 +401,32 @@ in the Installation Guide.
or JupyterLab installation, as described in [section
"Launching SageMath"](https://doc.sagemath.org/html/en/installation/launching.html)
in the installation manual.
+
+Alternative Installation using PyPI
+---------------
+
+To install `sage` in Python using `pip`, you need to install `sagemath-standard`. First, activate your Python virtual environment and follow these steps:
+
+ $ python3 -m pip install sage_conf
+ $ ls $(sage-config SAGE_SPKG_WHEELS)
+ $ python3 -m pip install $(sage-config SAGE_SPKG_WHEELS)/*.whl
+ $ python3 -m pip install sagemath-standard
+
+You need to install `sage_conf`, which provides a wheelhouse of various Python packages. You can list the wheels using `ls $(sage-config SAGE_SPKG_WHEELS)`. After manually installing these wheels, you can install the Sage library, `sagemath-standard`.
+
+**NOTE:** You can find `sage` and `sagemath` pip packages but with these packages, you will encounter `ModuleNotFoundError`.
+
+SageMath Docker images
+----------------------
+
+[![Docker Status](http://dockeri.co/image/sagemath/sagemath)](https://hub.docker.com/r/sagemath/sagemath)
+
+SageMath is available on Docker Hub and can be downloaded by:
+``` bash
+docker pull sagemath/sagemath
+```
+
+Currently, only stable versions are kept up to date.
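
As a companion to the PyPI installation steps above: the virtual environment they assume can be set up as follows (a minimal sketch; the directory name `venv-sage` is arbitrary).

```bash
# Minimal sketch: create and activate the virtual environment assumed by the
# `pip install` steps in the PyPI section above; "venv-sage" is an arbitrary name.
python3 -m venv venv-sage
source venv-sage/bin/activate
python3 -m pip install --upgrade pip
```
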
Troubleshooting
---------------
diff --git a/VERSION.txt b/VERSION.txt
index 8b731a891fa..3b70b866392 100644
--- a/VERSION.txt
+++ b/VERSION.txt
@@ -1 +1 @@
-SageMath version 10.0.beta1, Release Date: 2023-02-19
+SageMath version 10.2.beta5, Release Date: 2023-09-27
diff --git a/bootstrap b/bootstrap
index 48c4801d4b5..aa5755cfc63 100755
--- a/bootstrap
+++ b/bootstrap
@@ -35,10 +35,16 @@ CONFVERSION=$(cat $PKG/package-version.txt)
bootstrap () {
- if [ "${BOOTSTRAP_QUIET}" = "no" ]; then
- echo "bootstrap:$LINENO: installing 'm4/sage_spkg_configures.m4'"
- fi
- rm -f m4/sage_spkg_configures.m4
+ for a in m4/sage_spkg_configures.m4 m4/sage_spkg_versions.m4 m4/sage_spkg_versions_toml.m4; do
+ if [ "${BOOTSTRAP_QUIET}" = "no" ]; then
+ echo "bootstrap:$LINENO: installing '"$a"'"
+ fi
+ rm -f $a
+ echo "# Generated by SAGE_ROOT/bootstrap; do not edit" > $a
+ done
+ for a in m4/sage_spkg_versions.m4 m4/sage_spkg_versions_toml.m4; do
+ echo 'changequote(`>>>'"'"', `<<<'"')dnl" >> $a
+ done
spkg_configures=""
# initialize SAGE_ENABLE... options for standard packages
for pkgname in $(sage-package list :standard: | sort); do
@@ -92,17 +98,26 @@ SAGE_SPKG_CONFIGURE_$(echo ${pkgname} | tr '[a-z]' '[A-Z]')"
if test -f "$DIR/requirements.txt" -o -f "$DIR/install-requires.txt"; then
# A Python package
SPKG_TREE_VAR=SAGE_VENV
+ echo "define(>>>SPKG_INSTALL_REQUIRES_${pkgname}<<<, >>>$(echo $(ENABLE_SYSTEM_SITE_PACKAGES=yes sage-get-system-packages install-requires ${pkgname}))<<<)dnl" >> m4/sage_spkg_versions.m4
+ echo "define(>>>SPKG_INSTALL_REQUIRES_${pkgname}<<<, >>>$(echo $(ENABLE_SYSTEM_SITE_PACKAGES=yes sage-get-system-packages install-requires-toml ${pkgname}))<<<)dnl" >> m4/sage_spkg_versions_toml.m4
fi
fi
spkg_configures="$spkg_configures
SAGE_SPKG_FINALIZE([$pkgname], [$pkgtype], [$SPKG_SOURCE], [$SPKG_TREE_VAR])"
done
echo "$spkg_configures" >> m4/sage_spkg_configures.m4
+ for a in m4/sage_spkg_versions.m4 m4/sage_spkg_versions_toml.m4; do
+ echo 'changequote(>>>`<<<, >>>'"'"'<<<)dnl' >> $a
+ done
- for pkgname in $(sage-package list --has-file bootstrap); do
- (cd build/pkgs/$pkgname && ./bootstrap) || exit 1
+ for pkgname in $(sage-package list --has-file bootstrap "$@"); do
+ (cd build/pkgs/$pkgname && if [ -x bootstrap ]; then ./bootstrap; else echo >&2 "bootstrap:$LINENO: Nothing to do for $pkgname"; fi) || exit 1
done
+ if [ $# != 0 ]; then
+ return
+ fi
+
# Default to no filter if "-q" was not passed.
QUIET_SED_FILTER=""
if [ "${BOOTSTRAP_QUIET}" = "yes" ]; then
@@ -115,7 +130,6 @@ SAGE_SPKG_FINALIZE([$pkgname], [$pkgtype], [$SPKG_SOURCE], [$SPKG_TREE_VAR])"
# stdout alone. Basically we swap the two descriptors using a
# third, filter, and then swap them back.
./bootstrap-conda && \
- src/doc/bootstrap && \
aclocal -I m4 && \
automake --add-missing --copy build/make/Makefile-auto 3>&1 1>&2 2>&3 \
| sed "${QUIET_SED_FILTER}" 3>&1 1>&2 2>&3 && \
@@ -225,7 +239,7 @@ save () {
usage () {
- echo >&2 "Usage: $0 [-d|-D|-s] [-u ] [-h] [-q]"
+ echo >&2 "Usage: $0 [-d|-D|-s] [-u ] [-h] [-q] [SPKG...]"
echo >&2 ""
echo >&2 "Options:"
echo >&2 " -d fall back to downloading (released versions only)"
@@ -258,6 +272,7 @@ do
?) usage; exit 2;;
esac
done
+shift $(($OPTIND - 1))
export BOOTSTRAP_QUIET
CONFBALL="upstream/configure-$CONFVERSION.tar.gz"
@@ -267,14 +282,22 @@ if [ $DOWNLOAD$SAVE = yesyes ]; then
exit 2
fi
-# Start cleanly (it's not a problem if this fails)
+if [ $# != 0 -a $DOWNLOAD$ALWAYSDOWNLOAD$SAVE != nonono ]; then
+ echo >&2 "$0: Cannot combine -d, -D, -s, -u with SPKG arguments"
+ usage
+ exit 2
+fi
+
+# Start cleanly when a full bootstrap is happening (it's not a problem if this fails)
# POSIX supports two separate incompatible formats for the MAKEFLAGS
# variable, so instead of guessing, we simply define our own variable
# to optionally pass an "-s" (silent) flag to Make.
-MAKE_SILENT=""
-[ "${BOOTSTRAP_QUIET}" = "yes" ] && MAKE_SILENT="-s"
-$MAKE ${MAKE_SILENT} bootstrap-clean 2>/dev/null
-mkdir config 2>/dev/null
+if [ $# = 0 ]; then
+ MAKE_SILENT=""
+ [ "${BOOTSTRAP_QUIET}" = "yes" ] && MAKE_SILENT="-s"
+ $MAKE ${MAKE_SILENT} bootstrap-clean 2>/dev/null
+fi
+mkdir -p config 2>/dev/null
if [ $ALWAYSDOWNLOAD = yes ]; then
if [ -n "$CONFTARBALL_URL" ]; then
@@ -291,7 +314,7 @@ if [ $ALWAYSDOWNLOAD = yes ]; then
bootstrap_download || exit $?
fi
else
- bootstrap
+ bootstrap "$@"
fi
if [ $SAVE = yes ]; then
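
With the new optional `SPKG` arguments (see the usage string and the `-d/-D/-s/-u` guard above), `./bootstrap` can regenerate only selected packages. A sketch; the package names are examples only:

```bash
# Full bootstrap, as before:
./bootstrap -q
# Re-run only the per-package bootstrap scripts of selected packages
# (example names; any package with a `bootstrap` file under build/pkgs/ qualifies):
./bootstrap sagelib sage_conf
```
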
diff --git a/bootstrap-conda b/bootstrap-conda
index 9ba4b50ab6f..ed4bb9e0d08 100755
--- a/bootstrap-conda
+++ b/bootstrap-conda
@@ -11,14 +11,13 @@ STRIP_COMMENTS="sed s/#.*//;"
shopt -s extglob
DEVELOP_SPKG_PATTERN="@(_develop$(for a in $(head -n 1 build/pkgs/_develop/dependencies); do echo -n "|"$a; done))"
-
BOOTSTRAP_PACKAGES=$(echo $(${STRIP_COMMENTS} build/pkgs/_bootstrap/distros/conda.txt))
SYSTEM_PACKAGES=
OPTIONAL_SYSTEM_PACKAGES=
SAGELIB_SYSTEM_PACKAGES=
SAGELIB_OPTIONAL_SYSTEM_PACKAGES=
DEVELOP_SYSTEM_PACKAGES=
-for PKG_BASE in $(sage-package list --has-file distros/conda.txt); do
+for PKG_BASE in $(sage-package list --has-file distros/conda.txt --exclude _sagemath); do
PKG_SCRIPTS=build/pkgs/$PKG_BASE
SYSTEM_PACKAGES_FILE=$PKG_SCRIPTS/distros/conda.txt
PKG_TYPE=$(cat $PKG_SCRIPTS/type)
@@ -37,10 +36,13 @@ for PKG_BASE in $(sage-package list --has-file distros/conda.txt); do
;;
esac
else
- case "$PKG_TYPE" in
- standard)
+ case "$PKG_BASE:$PKG_TYPE" in
+ *:standard)
SAGELIB_SYSTEM_PACKAGES+=" $PKG_SYSTEM_PACKAGES"
;;
+ $DEVELOP_SPKG_PATTERN:*)
+ DEVELOP_SYSTEM_PACKAGES+=" $PKG_SYSTEM_PACKAGES"
+ ;;
*)
SAGELIB_OPTIONAL_SYSTEM_PACKAGES+=" $PKG_SYSTEM_PACKAGES"
;;
diff --git a/build/bin/sage-build-env-config.in b/build/bin/sage-build-env-config.in
index b00fd2a3f99..7d6cd113bf9 100644
--- a/build/bin/sage-build-env-config.in
+++ b/build/bin/sage-build-env-config.in
@@ -53,9 +53,13 @@ export SAGE_PARI_CFG="@SAGE_PARI_CFG@"
export SAGE_GLPK_PREFIX="@SAGE_GLPK_PREFIX@"
export SAGE_FREETYPE_PREFIX="@SAGE_FREETYPE_PREFIX@"
+export SAGE_PIP_INSTALL_FLAGS="@SAGE_PIP_INSTALL_FLAGS@"
export SAGE_SUITESPARSE_PREFIX="@SAGE_SUITESPARSE_PREFIX@"
export SAGE_CONFIGURE_FFLAS_FFPACK="@SAGE_CONFIGURE_FFLAS_FFPACK@"
export CONFIGURED_SAGE_EDITABLE="@SAGE_EDITABLE@"
export CONFIGURED_SAGE_WHEELS="@SAGE_WHEELS@"
+
+export ENABLE_SYSTEM_SITE_PACKAGES="@ENABLE_SYSTEM_SITE_PACKAGES@"
+export PYTHON_MINOR="@PYTHON_MINOR@"
diff --git a/build/bin/sage-clone-source b/build/bin/sage-clone-source
index 1b71e81999a..65dc1ffa93f 100755
--- a/build/bin/sage-clone-source
+++ b/build/bin/sage-clone-source
@@ -31,11 +31,13 @@ CONFBALL="$SRC/upstream/configure-$CONFVERSION.tar.gz"
rm -rf "$DST"
mkdir -p "$DST"
-git clone --origin trac "$SRC" "$DST"
+git clone --origin upstream "$SRC" "$DST"
cd "$DST"
-git remote set-url trac "$SAGE_REPO_ANONYMOUS"
-git remote set-url --push trac "$SAGE_REPO_AUTHENTICATED"
+git remote set-url upstream "$SAGE_REPO_ANONYMOUS"
+git remote set-url --push upstream "do not push to upstream"
+git remote add trac https://github.com/sagemath/sagetrac-mirror.git
+git remote set-url --push trac "do not push to trac"
# Save space
git gc --aggressive --prune=now
diff --git a/build/bin/sage-dist-helpers b/build/bin/sage-dist-helpers
index 4eac9626ffa..67a2201d31f 100644
--- a/build/bin/sage-dist-helpers
+++ b/build/bin/sage-dist-helpers
@@ -325,7 +325,11 @@ sdh_store_wheel() {
}
sdh_store_and_pip_install_wheel() {
- local pip_options=""
+ # The $SAGE_PIP_INSTALL_FLAGS variable is set by sage-build-env-config.
+ # We skip sanity checking its contents since you should either let sage
+ # decide what it contains, or really know what you are doing.
+ local pip_options="${SAGE_PIP_INSTALL_FLAGS}"
+
while [ $# -gt 0 ]; do
case $1 in
-*) pip_options="$pip_options $1"
diff --git a/build/bin/sage-get-system-packages b/build/bin/sage-get-system-packages
index b5153c53694..c8b9314c3fe 100755
--- a/build/bin/sage-get-system-packages
+++ b/build/bin/sage-get-system-packages
@@ -1,4 +1,5 @@
#!/bin/sh
+
SYSTEM=$1
if [ -z "$SYSTEM" ]; then
echo >&2 "usage: $0 {auto|debian|arch|conda|pip|...} SPKGS..."
@@ -10,18 +11,19 @@ SPKGS="$*"
if [ -z "$SAGE_ROOT" ]; then
SAGE_ROOT=`pwd`
fi
+
case "$SYSTEM" in
install-requires)
- # Collect install-requires.txt and output it in the format
+ # Collect install-requires.txt (falling back to requirements.txt) and output it in the format
# needed by setup.cfg [options] install_requires=
- SYSTEM_PACKAGES_FILE_NAMES="install-requires.txt"
+ SYSTEM_PACKAGES_FILE_NAMES="install-requires.txt requirements.txt"
STRIP_COMMENTS="sed s/#.*//;/^[[:space:]]*$/d;"
COLLECT=
;;
install-requires-toml)
- # Collect install-requires.txt and output it in the format
+ # Collect install-requires.txt (falling back to requirements.txt) and output it in the format
# needed by pyproject.toml [build-system] requires=
- SYSTEM_PACKAGES_FILE_NAMES="install-requires.txt"
+ SYSTEM_PACKAGES_FILE_NAMES="install-requires.txt requirements.txt"
STRIP_COMMENTS="sed s/#.*//;/^[[:space:]]*$/d;s/^/'/;s/$/',/;"
COLLECT=
;;
@@ -44,6 +46,18 @@ case "$SYSTEM" in
;;
esac
for PKG_BASE in $SPKGS; do
+
+ # Skip this package if it uses the SAGE_PYTHON_PACKAGE_CHECK
+ # macro and if --enable-system-site-packages was NOT passed
+ # to ./configure (or if ./configure has not yet been run).
+ SPKG_CONFIGURE="${SAGE_ROOT}/build/pkgs/${PKG_BASE}/spkg-configure.m4"
+ if [ -z "${ENABLE_SYSTEM_SITE_PACKAGES}" ]; then
+ if grep -q SAGE_PYTHON_PACKAGE_CHECK "${SPKG_CONFIGURE}" 2>/dev/null;
+ then
+ continue;
+ fi
+ fi
+
for NAME in $SYSTEM_PACKAGES_FILE_NAMES; do
SYSTEM_PACKAGES_FILE="$SAGE_ROOT"/build/pkgs/$PKG_BASE/$NAME
if [ -f $SYSTEM_PACKAGES_FILE ]; then
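
The new guard means that Python packages whose `spkg-configure.m4` uses `SAGE_PYTHON_PACKAGE_CHECK` are only reported once `--enable-system-site-packages` has been configured. A sketch, using `alabaster` (whose `spkg-configure.m4` is added later in this diff):

```bash
# Without the configure option, the guarded Python package is skipped:
sage-get-system-packages debian alabaster        # prints nothing
# With ENABLE_SYSTEM_SITE_PACKAGES exported (as configure/sage-build-env-config do),
# the names from build/pkgs/alabaster/distros/debian.txt are printed, if that file exists:
ENABLE_SYSTEM_SITE_PACKAGES=yes sage-get-system-packages debian alabaster
```
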
diff --git a/build/bin/sage-logger b/build/bin/sage-logger
index 1682ccbc079..1d8d92c0c75 100755
--- a/build/bin/sage-logger
+++ b/build/bin/sage-logger
@@ -89,8 +89,9 @@ else
# Redirect stdout and stderr to a subprocess running tee.
# We trap SIGINT such that SIGINT interrupts the main process being
# run, not the logging.
+
( exec 2>&1; eval "$cmd" ) | \
- ( trap '' SIGINT; tee -a "$logfile" | $SED )
+ ( trap '' SIGINT; if [ -n "$GITHUB_ACTIONS" -a -n "$prefix" ]; then echo "::group::${logname}"; fi; tee -a "$logfile" | $SED; if [ -n "$GITHUB_ACTIONS" -a -n "$prefix" ]; then echo "::endgroup::"; fi )
pipestatus=(${PIPESTATUS[*]})
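
The added `::group::`/`::endgroup::` markers are GitHub Actions workflow commands that fold the enclosed output into a collapsible section of the job log. Stripped of the tee/sed plumbing above, the pattern is simply:

```bash
# Only takes effect when the output is consumed by the GitHub Actions runner,
# i.e. when $GITHUB_ACTIONS is set (as tested above).
echo "::group::${logname}"          # open a collapsible section named after the log
eval "$cmd" 2>&1 | tee -a "$logfile"
echo "::endgroup::"                 # close the section
```
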
diff --git a/build/bin/sage-spkg-info b/build/bin/sage-spkg-info
index 4e53139fa7e..e43e516dc5b 100755
--- a/build/bin/sage-spkg-info
+++ b/build/bin/sage-spkg-info
@@ -9,7 +9,7 @@ PKG_SCRIPTS="$SAGE_ROOT/build/pkgs/$PKG_BASE"
for ext in rst txt; do
SPKG_FILE="$PKG_SCRIPTS/SPKG.$ext"
if [ -f "$SPKG_FILE" ]; then
- cat "$SPKG_FILE"
+ sed "1,3s/^ *Sage: Open Source Mathematics Software:/$PKG_BASE:/" "$SPKG_FILE"
break
fi
done
@@ -110,17 +110,19 @@ if [ -z "$system" ]; then
echo "(none known)"
else
echo
- if [ -f "$PKG_SCRIPTS"/spkg-configure.m4 ]; then
- echo "If the system package is installed, ./configure will check whether it can be used."
- else
- echo "However, these system packages will not be used for building Sage"
- if [ -f "$PKG_SCRIPTS"/install-requires.txt ]; then
- echo "because using Python site-packages is not supported by the Sage distribution;"
- echo "see https://github.com/sagemath/sage/issues/29023"
+ SPKG_CONFIGURE="${PKG_SCRIPTS}/spkg-configure.m4"
+ if [ -f "${SPKG_CONFIGURE}" ]; then
+ if grep -q SAGE_PYTHON_PACKAGE_CHECK "${SPKG_CONFIGURE}"; then
+ echo "If the system package is installed and if the (experimental) option"
+ echo "--enable-system-site-packages is passed to ./configure, then ./configure"
+ echo "will check if the system package can be used."
else
- echo "because spkg-configure.m4 has not been written for this package;"
- echo "see https://github.com/sagemath/sage/issues/27330"
+ echo "If the system package is installed, ./configure will check if it can be used."
fi
+ else
+ echo "However, these system packages will not be used for building Sage"
+ echo "because spkg-configure.m4 has not been written for this package;"
+ echo "see https://github.com/sagemath/sage/issues/27330"
fi
fi
echo
diff --git a/build/bin/write-dockerfile.sh b/build/bin/write-dockerfile.sh
index 14cbaf786eb..6980f6b6e4b 100755
--- a/build/bin/write-dockerfile.sh
+++ b/build/bin/write-dockerfile.sh
@@ -5,7 +5,7 @@
set -e
SYSTEM="${1:-debian}"
shopt -s extglob
-SAGE_PACKAGE_LIST_ARGS="${2:- --has-file=spkg-configure.m4 :standard:}"
+SAGE_PACKAGE_LIST_ARGS="${2:-:standard:}"
WITH_SYSTEM_SPKG="${3:-yes}"
IGNORE_MISSING_SYSTEM_PACKAGES="${4:-no}"
EXTRA_SAGE_PACKAGES="${5:-_bootstrap}"
@@ -15,17 +15,13 @@ SAGE_ROOT=.
export PATH="$SAGE_ROOT"/build/bin:$PATH
SYSTEM_PACKAGES=$EXTRA_SYSTEM_PACKAGES
CONFIGURE_ARGS="--enable-option-checking "
-for PKG_BASE in $(sage-package list --has-file=distros/$SYSTEM.txt $SAGE_PACKAGE_LIST_ARGS) $EXTRA_SAGE_PACKAGES; do
- PKG_SCRIPTS="$SAGE_ROOT"/build/pkgs/$PKG_BASE
- if [ -d $PKG_SCRIPTS ]; then
- SYSTEM_PACKAGES_FILE=$PKG_SCRIPTS/distros/$SYSTEM.txt
- PKG_SYSTEM_PACKAGES=$(echo $(${STRIP_COMMENTS} $SYSTEM_PACKAGES_FILE))
- if [ -n "PKG_SYSTEM_PACKAGES" ]; then
- SYSTEM_PACKAGES+=" $PKG_SYSTEM_PACKAGES"
- if [ -f $PKG_SCRIPTS/spkg-configure.m4 ]; then
- CONFIGURE_ARGS+="--with-system-$PKG_BASE=${WITH_SYSTEM_SPKG} "
- fi
- fi
+for SPKG in $(sage-package list --has-file=spkg-configure.m4 $SAGE_PACKAGE_LIST_ARGS) $EXTRA_SAGE_PACKAGES; do
+ SYSTEM_PACKAGE=$(sage-get-system-packages $SYSTEM $SPKG | sed 's/${PYTHON_MINOR}/'${PYTHON_MINOR}'/g')
+ if [ -n "${SYSTEM_PACKAGE}" ]; then
+ # SYSTEM_PACKAGE can be empty if, for example, the environment
+ # variable ENABLE_SYSTEM_SITE_PACKAGES is empty.
+ SYSTEM_PACKAGES+=" ${SYSTEM_PACKAGE}"
+ CONFIGURE_ARGS+="--with-system-${SPKG}=${WITH_SYSTEM_SPKG} "
fi
done
echo "# Automatically generated by SAGE_ROOT/build/bin/write-dockerfile.sh"
diff --git a/build/make/Makefile.in b/build/make/Makefile.in
index 84f6f877e3d..cc004d08c3c 100644
--- a/build/make/Makefile.in
+++ b/build/make/Makefile.in
@@ -128,7 +128,29 @@ PIP_PACKAGES = @SAGE_PIP_PACKAGES@
# Packages that use the 'script' package build rules
SCRIPT_PACKAGES = @SAGE_SCRIPT_PACKAGES@
-
+# Packages for which we build wheels for PyPI
+PYPI_WHEEL_PACKAGES = \
+ sage_sws2rst \
+ sage_setup \
+ sagemath_environment \
+ sagemath_objects \
+ sagemath_repl \
+ sagemath_categories \
+ sagemath_bliss \
+ sagemath_mcqd \
+ sagemath_tdlib \
+ sagemath_coxeter3 \
+ sagemath_sirocco \
+ sagemath_meataxe
+
+# sage_docbuild is here, not in PYPI_WHEEL_PACKAGES, because it depends on sagelib
+WHEEL_PACKAGES = $(PYPI_WHEEL_PACKAGES) \
+ sage_conf \
+ sagelib \
+ sage_docbuild
+
+# Packages for which we build sdists for PyPI
+PYPI_SDIST_PACKAGES = $(WHEEL_PACKAGES)
# Generate the actual inst_ variables; for each package that is
# actually built this generates a line like:
@@ -198,17 +220,20 @@ SAGE_I_TARGETS = sagelib doc
# Tell make not to look for files with these names:
.PHONY: all all-sage all-toolchain all-build all-sageruntime \
all-start build-start base toolchain toolchain-deps base-toolchain \
+ pypi-sdists pypi-wheels wheels \
sagelib \
doc doc-html doc-html-jsmath doc-html-mathjax doc-pdf \
doc-uninstall \
python3_venv _clean-broken-gcc
PYTHON_FOR_VENV = @PYTHON_FOR_VENV@
+PYTHON_MINOR = @PYTHON_MINOR@
+SAGE_VENV_FLAGS = @SAGE_VENV_FLAGS@
ifneq ($(PYTHON_FOR_VENV),)
# Special rule for making the Python virtualenv from the system Python (Python
# 3 only). $(PYTHON) is set in Makefile to python3_venv.
-# Thus $(inst_python3_venv) will be the dependency of every Python package.
+# Thus $(inst_python3_venv) will be an (order-only) dependency of every Python package.
#
# TODO: If we reconfigure to build our own Python after having used the system
# Python, files installed to create the virtualenv should be *removed*. That
@@ -216,10 +241,15 @@ ifneq ($(PYTHON_FOR_VENV),)
ifeq ($(PYTHON),python3)
PYTHON = python3_venv
endif
-inst_python3_venv = $(SAGE_VENV)/pyvenv.cfg
+inst_python3_venv = $(SAGE_VENV)/$(SPKG_INST_RELDIR)/python3_venv-3.$(PYTHON_MINOR)-$(subst /,-,$(PYTHON_FOR_VENV))$(findstring --system-site-packages,$(SAGE_VENV_FLAGS))
+
+$(SAGE_VENV)/$(SPKG_INST_RELDIR):
+ mkdir -p "$@"
-$(inst_python3_venv):
- $(PYTHON_FOR_VENV) $(SAGE_ROOT)/build/bin/sage-venv "$(SAGE_VENV)"
+$(inst_python3_venv): | $(SAGE_VENV)/$(SPKG_INST_RELDIR)
+ $(PYTHON_FOR_VENV) $(SAGE_ROOT)/build/bin/sage-venv $(SAGE_VENV_FLAGS) "$(SAGE_VENV)"
+ rm -f "$(SAGE_VENV)/$(SPKG_INST_RELDIR)"/python3_venv-*
+ touch "$@"
endif
# Build everything and start Sage.
@@ -418,6 +448,25 @@ list-broken-packages: auditwheel_or_delocate
echo >&2 "$$fix_broken_packages"; \
fi
+pypi-sdists: $(PYPI_SDIST_PACKAGES:%=%-sdist)
+ @echo "Built sdists are in upstream/"
+
+# Ensuring wheels are present, even for packages that may have been installed
+# as editable. Until we have better uninstallation of script packages, we
+# just remove the timestamps, which will lead to rebuilds of the packages.
+pypi-wheels:
+ for a in $(PYPI_WHEEL_PACKAGES); do \
+ rm -f $(SAGE_VENV)/var/lib/sage/installed/$$a-*; \
+ done
+ $(MAKE_REC) SAGE_EDITABLE=no SAGE_WHEELS=yes $(PYPI_WHEEL_PACKAGES)
+ @echo "Built wheels are in venv/var/lib/sage/wheels/"
+
+wheels:
+ for a in $(WHEEL_PACKAGES); do \
+ rm -f $(SAGE_VENV)/var/lib/sage/installed/$$a-*; \
+ done
+ $(MAKE_REC) SAGE_EDITABLE=no SAGE_WHEELS=yes $(WHEEL_PACKAGES)
+ @echo "Built wheels are in venv/var/lib/sage/wheels/"
#==============================================================================
# Setting SAGE_CHECK... variables
@@ -549,6 +598,8 @@ $$($(4))/$(SPKG_INST_RELDIR)/$(1)-$(2): $(3)
$(1): $$($(4))/$(SPKG_INST_RELDIR)/$(1)-$(2)
+$(1)-ensure: $(inst_$(1))
+
$(1)-$(4)-no-deps:
+@if [ -z '$$($(4))' ]; then \
echo "Error: The installation tree $(4) has been disabled" 2>&1; \
@@ -607,6 +658,8 @@ $(1)-build-deps: $(2)
$(1): $(2)
+$(MAKE_REC) $(1)-no-deps
+$(1)-ensure: $(inst_$(1))
+
$(1)-no-deps:
$(AM_V_at)sage-logger -p 'sage --pip install -r "$$(SAGE_ROOT)/build/pkgs/$(1)/requirements.txt"' '$$(SAGE_LOGS)/$(1).log'
@@ -663,6 +716,8 @@ $$($(4))/$(SPKG_INST_RELDIR)/$(1)-$(2): $(3)
$(1): $$($(4))/$(SPKG_INST_RELDIR)/$(1)-$(2)
+$(1)-ensure: $(inst_$(1))
+
$(1)-$(4)-no-deps:
$(PLUS)@if [ -z '$$($(4))' ]; then \
echo "Error: The installation tree $(4) has been disabled" 2>&1; \
@@ -711,6 +766,13 @@ $(1)-uninstall: $(1)-$(4)-uninstall
$(1)-clean: $(1)-uninstall
+$(1)-sdist: FORCE python_build sage_setup cython
+ $(AM_V_at) cd '$$(SAGE_ROOT)' && \
+ . '$$(SAGE_ROOT)/src/bin/sage-src-env-config' && \
+ . '$$(SAGE_ROOT)/src/bin/sage-env-config' && \
+ . '$$(SAGE_ROOT)/src/bin/sage-env' && \
+ '$$(SAGE_ROOT)/build/pkgs/$(1)/spkg-src'
+
# Recursive tox invocation (note - we do not set the environment here).
# Setting SAGE_SPKG_WHEELS is for the benefit of sagelib's tox.ini
$(1)-tox-%: FORCE
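
The wheel and sdist targets keep the names of the removed top-level `Makefile` targets, so the usage is unchanged; for example:

```bash
make pypi-wheels     # wheels for the PYPI_WHEEL_PACKAGES listed above
make wheels          # additionally sage_conf, sagelib and sage_docbuild
make pypi-sdists     # sdists for all WHEEL_PACKAGES, written to upstream/
```
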
diff --git a/build/pkgs/4ti2/checksums.ini b/build/pkgs/4ti2/checksums.ini
index 22971f1bd4d..275cf157a6a 100644
--- a/build/pkgs/4ti2/checksums.ini
+++ b/build/pkgs/4ti2/checksums.ini
@@ -1,4 +1,5 @@
tarball=4ti2-VERSION.tar.gz
-sha1=9a9a6913bcd52b667355a7df7fa954ca101054cb
-md5=90def0d6b01a0247e439356777467497
-cksum=439147307
+sha1=3d41f30ea3ef94c293eae30c087494269fc1a6b9
+md5=1215872325ddfc561865ecb22b2bccb2
+cksum=2439180289
+upstream_url=https://github.com/4ti2/4ti2/releases/download/Release_1_6_10/4ti2-1.6.10.tar.gz
diff --git a/build/pkgs/4ti2/package-version.txt b/build/pkgs/4ti2/package-version.txt
index ba598ce6877..1df3b822c71 100644
--- a/build/pkgs/4ti2/package-version.txt
+++ b/build/pkgs/4ti2/package-version.txt
@@ -1 +1 @@
-1.6.7.p0
+1.6.10
diff --git a/build/pkgs/_bootstrap/distros/conda.txt b/build/pkgs/_bootstrap/distros/conda.txt
index b5d2db8cfb2..5fe960ac3a0 100644
--- a/build/pkgs/_bootstrap/distros/conda.txt
+++ b/build/pkgs/_bootstrap/distros/conda.txt
@@ -1,2 +1,5 @@
# Packages needed for ./bootstrap
-autoconf automake libtool
+autoconf
+automake
+libtool
+pkg-config
diff --git a/build/pkgs/_bootstrap/distros/slackware.txt b/build/pkgs/_bootstrap/distros/slackware.txt
index 4bd0e6e12b5..01f7ebb4e83 100644
--- a/build/pkgs/_bootstrap/distros/slackware.txt
+++ b/build/pkgs/_bootstrap/distros/slackware.txt
@@ -2,3 +2,4 @@
autoconf
automake
libtool
+pkg-config
diff --git a/build/pkgs/_bootstrap/distros/void.txt b/build/pkgs/_bootstrap/distros/void.txt
index 159333b4216..6490cf773e7 100644
--- a/build/pkgs/_bootstrap/distros/void.txt
+++ b/build/pkgs/_bootstrap/distros/void.txt
@@ -1,3 +1,4 @@
# Packages needed for ./bootstrap
autoconf automake libtool
xtools mk-configure
+pkg-config
diff --git a/build/pkgs/_prereq/distros/conda.txt b/build/pkgs/_prereq/distros/conda.txt
index a02a39e73bf..d76388ce7bb 100644
--- a/build/pkgs/_prereq/distros/conda.txt
+++ b/build/pkgs/_prereq/distros/conda.txt
@@ -5,4 +5,3 @@ perl
python
tar
bc
-pkg-config
diff --git a/build/pkgs/_prereq/distros/fedora.txt b/build/pkgs/_prereq/distros/fedora.txt
index 79919eef51b..b35d7f64faf 100644
--- a/build/pkgs/_prereq/distros/fedora.txt
+++ b/build/pkgs/_prereq/distros/fedora.txt
@@ -30,7 +30,6 @@ gcc-c++
# The need for which comes [...] from MPIR's configure script
findutils
which
-# Needed for pcre configure, see https://github.com/sagemath/sage/issues/29129:
diffutils
# Needed for openssl 3.0
perl-IPC-Cmd
diff --git a/build/pkgs/_prereq/distros/slackware.txt b/build/pkgs/_prereq/distros/slackware.txt
index 4c2b7080ce8..4c957e45264 100644
--- a/build/pkgs/_prereq/distros/slackware.txt
+++ b/build/pkgs/_prereq/distros/slackware.txt
@@ -14,6 +14,5 @@ python3 # on slackware-current
flex
# for https upstream_url downloads
ca-certificates
-pkg-config
libxml2
cyrus-sasl
diff --git a/build/pkgs/_prereq/distros/void.txt b/build/pkgs/_prereq/distros/void.txt
index da5cd5330ee..552b5a415f2 100644
--- a/build/pkgs/_prereq/distros/void.txt
+++ b/build/pkgs/_prereq/distros/void.txt
@@ -5,7 +5,6 @@ libgomp-devel
m4
make
perl
-pkg-config
python3
tar
which
diff --git a/build/pkgs/_python3.8/distros/arch.txt b/build/pkgs/_python3.8/distros/arch.txt
deleted file mode 100644
index 398ae3228b3..00000000000
--- a/build/pkgs/_python3.8/distros/arch.txt
+++ /dev/null
@@ -1 +0,0 @@
-python38
diff --git a/build/pkgs/_python3.8/distros/cygwin.txt b/build/pkgs/_python3.8/distros/cygwin.txt
deleted file mode 100644
index 398ae3228b3..00000000000
--- a/build/pkgs/_python3.8/distros/cygwin.txt
+++ /dev/null
@@ -1 +0,0 @@
-python38
diff --git a/build/pkgs/_python3.8/distros/debian.txt b/build/pkgs/_python3.8/distros/debian.txt
deleted file mode 100644
index bf46e908ff6..00000000000
--- a/build/pkgs/_python3.8/distros/debian.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-python3.8
-python3.8-dev
-python3.8-distutils
-python3.8-venv
diff --git a/build/pkgs/_python3.8/distros/fedora.txt b/build/pkgs/_python3.8/distros/fedora.txt
deleted file mode 100644
index 1f9ac08ba8e..00000000000
--- a/build/pkgs/_python3.8/distros/fedora.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-python38
-python38-devel
diff --git a/build/pkgs/_python3.8/distros/freebsd.txt b/build/pkgs/_python3.8/distros/freebsd.txt
deleted file mode 100644
index 398ae3228b3..00000000000
--- a/build/pkgs/_python3.8/distros/freebsd.txt
+++ /dev/null
@@ -1 +0,0 @@
-python38
diff --git a/build/pkgs/_python3.8/distros/homebrew.txt b/build/pkgs/_python3.8/distros/homebrew.txt
deleted file mode 100644
index ea9989e790c..00000000000
--- a/build/pkgs/_python3.8/distros/homebrew.txt
+++ /dev/null
@@ -1 +0,0 @@
-python@3.8
diff --git a/build/pkgs/_python3.8/distros/macports.txt b/build/pkgs/_python3.8/distros/macports.txt
deleted file mode 100644
index 398ae3228b3..00000000000
--- a/build/pkgs/_python3.8/distros/macports.txt
+++ /dev/null
@@ -1 +0,0 @@
-python38
diff --git a/build/pkgs/_python3.8/distros/opensuse.txt b/build/pkgs/_python3.8/distros/opensuse.txt
deleted file mode 100644
index 1f9ac08ba8e..00000000000
--- a/build/pkgs/_python3.8/distros/opensuse.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-python38
-python38-devel
diff --git a/build/pkgs/admcycles/dependencies b/build/pkgs/admcycles/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/admcycles/dependencies
+++ b/build/pkgs/admcycles/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/alabaster/dependencies b/build/pkgs/alabaster/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/alabaster/dependencies
+++ b/build/pkgs/alabaster/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/alabaster/distros/gentoo.txt b/build/pkgs/alabaster/distros/gentoo.txt
new file mode 100644
index 00000000000..cffece61b27
--- /dev/null
+++ b/build/pkgs/alabaster/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/alabaster
diff --git a/build/pkgs/alabaster/distros/opensuse.txt b/build/pkgs/alabaster/distros/opensuse.txt
index dcc39f9e849..debe990ce19 100644
--- a/build/pkgs/alabaster/distros/opensuse.txt
+++ b/build/pkgs/alabaster/distros/opensuse.txt
@@ -1 +1 @@
-python3-alabaster
+python3${PYTHON_MINOR}-alabaster
diff --git a/build/pkgs/alabaster/spkg-configure.m4 b/build/pkgs/alabaster/spkg-configure.m4
new file mode 100644
index 00000000000..4eca6e05a5f
--- /dev/null
+++ b/build/pkgs/alabaster/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([alabaster], [SAGE_PYTHON_PACKAGE_CHECK([alabaster])])
diff --git a/build/pkgs/appdirs/dependencies b/build/pkgs/appdirs/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/appdirs/dependencies
+++ b/build/pkgs/appdirs/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/appnope/dependencies b/build/pkgs/appnope/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/appnope/dependencies
+++ b/build/pkgs/appnope/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/argon2_cffi/dependencies b/build/pkgs/argon2_cffi/dependencies
index 70a583a0dbf..920046ab33d 100644
--- a/build/pkgs/argon2_cffi/dependencies
+++ b/build/pkgs/argon2_cffi/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) six | $(PYTHON_TOOLCHAIN) cffi
+ argon2_cffi_bindings | $(PYTHON_TOOLCHAIN) flit_core $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/argon2_cffi/distros/gentoo.txt b/build/pkgs/argon2_cffi/distros/gentoo.txt
new file mode 100644
index 00000000000..2f12ca869e4
--- /dev/null
+++ b/build/pkgs/argon2_cffi/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/argon2-cffi
diff --git a/build/pkgs/argon2_cffi/spkg-configure.m4 b/build/pkgs/argon2_cffi/spkg-configure.m4
new file mode 100644
index 00000000000..03ebc634492
--- /dev/null
+++ b/build/pkgs/argon2_cffi/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([argon2_cffi], [SAGE_PYTHON_PACKAGE_CHECK([argon2_cffi])])
diff --git a/build/pkgs/argon2_cffi/spkg-install.in b/build/pkgs/argon2_cffi/spkg-install.in
index 359ae695eac..37ac1a53437 100644
--- a/build/pkgs/argon2_cffi/spkg-install.in
+++ b/build/pkgs/argon2_cffi/spkg-install.in
@@ -1,6 +1,2 @@
cd src
-if [ "$SAGE_FAT_BINARY" = "yes" ]; then
- # https://argon2-cffi.readthedocs.io/en/stable/installation.html
- export ARGON2_CFFI_USE_SSE2=0
-fi
sdh_pip_install .
diff --git a/build/pkgs/argon2_cffi_bindings/SPKG.rst b/build/pkgs/argon2_cffi_bindings/SPKG.rst
new file mode 100644
index 00000000000..3d9a76114f1
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/SPKG.rst
@@ -0,0 +1,18 @@
+argon2_cffi_bindings: Low-level CFFI bindings for Argon2
+========================================================
+
+Description
+-----------
+
+Low-level CFFI bindings for Argon2
+
+License
+-------
+
+MIT
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/argon2-cffi-bindings/
+
diff --git a/build/pkgs/argon2_cffi_bindings/checksums.ini b/build/pkgs/argon2_cffi_bindings/checksums.ini
new file mode 100644
index 00000000000..fe2e878a74d
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/checksums.ini
@@ -0,0 +1,5 @@
+tarball=argon2-cffi-bindings-VERSION.tar.gz
+sha1=5a9b8906d9ca73c53c2bf0a2f0a8127fda69e965
+md5=f1591e1af7dea9ef3e5b982e2c196c1d
+cksum=2420586823
+upstream_url=https://pypi.io/packages/source/a/argon2_cffi_bindings/argon2-cffi-bindings-VERSION.tar.gz
diff --git a/build/pkgs/argon2_cffi_bindings/dependencies b/build/pkgs/argon2_cffi_bindings/dependencies
new file mode 100644
index 00000000000..4b9d24ccf44
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/dependencies
@@ -0,0 +1,4 @@
+ | $(PYTHON_TOOLCHAIN) cffi setuptools_scm $(PYTHON)
+
+----------
+All lines of this file are ignored except the first.
diff --git a/build/pkgs/argon2_cffi_bindings/install-requires.txt b/build/pkgs/argon2_cffi_bindings/install-requires.txt
new file mode 100644
index 00000000000..50485097375
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/install-requires.txt
@@ -0,0 +1 @@
+argon2-cffi-bindings
diff --git a/build/pkgs/argon2_cffi_bindings/package-version.txt b/build/pkgs/argon2_cffi_bindings/package-version.txt
new file mode 100644
index 00000000000..b295a689e74
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/package-version.txt
@@ -0,0 +1 @@
+21.2.0
diff --git a/build/pkgs/argon2_cffi_bindings/spkg-install.in b/build/pkgs/argon2_cffi_bindings/spkg-install.in
new file mode 100644
index 00000000000..359ae695eac
--- /dev/null
+++ b/build/pkgs/argon2_cffi_bindings/spkg-install.in
@@ -0,0 +1,6 @@
+cd src
+if [ "$SAGE_FAT_BINARY" = "yes" ]; then
+ # https://argon2-cffi.readthedocs.io/en/stable/installation.html
+ export ARGON2_CFFI_USE_SSE2=0
+fi
+sdh_pip_install .
diff --git a/build/pkgs/backports_zoneinfo/type b/build/pkgs/argon2_cffi_bindings/type
similarity index 100%
rename from build/pkgs/backports_zoneinfo/type
rename to build/pkgs/argon2_cffi_bindings/type
diff --git a/build/pkgs/asttokens/dependencies b/build/pkgs/asttokens/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/asttokens/dependencies
+++ b/build/pkgs/asttokens/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/asttokens/distros/gentoo.txt b/build/pkgs/asttokens/distros/gentoo.txt
new file mode 100644
index 00000000000..4aae3fb6cb4
--- /dev/null
+++ b/build/pkgs/asttokens/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/asttokens
diff --git a/build/pkgs/asttokens/spkg-configure.m4 b/build/pkgs/asttokens/spkg-configure.m4
new file mode 100644
index 00000000000..2221fabfb2b
--- /dev/null
+++ b/build/pkgs/asttokens/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([asttokens], [SAGE_PYTHON_PACKAGE_CHECK([asttokens])])
diff --git a/build/pkgs/attrs/dependencies b/build/pkgs/attrs/dependencies
index 4361e46ddaf..9be6b4aab7c 100644
--- a/build/pkgs/attrs/dependencies
+++ b/build/pkgs/attrs/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) vcversioner | $(PYTHON_TOOLCHAIN)
+ vcversioner | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/attrs/distros/gentoo.txt b/build/pkgs/attrs/distros/gentoo.txt
new file mode 100644
index 00000000000..3b906facd3a
--- /dev/null
+++ b/build/pkgs/attrs/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/attrs
diff --git a/build/pkgs/attrs/spkg-configure.m4 b/build/pkgs/attrs/spkg-configure.m4
new file mode 100644
index 00000000000..ba6a9b71efa
--- /dev/null
+++ b/build/pkgs/attrs/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([attrs], [SAGE_PYTHON_PACKAGE_CHECK([attrs])])
diff --git a/build/pkgs/auditwheel_or_delocate/dependencies b/build/pkgs/auditwheel_or_delocate/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/auditwheel_or_delocate/dependencies
+++ b/build/pkgs/auditwheel_or_delocate/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/awali/dependencies b/build/pkgs/awali/dependencies
index b125e2ded92..09b60167a34 100644
--- a/build/pkgs/awali/dependencies
+++ b/build/pkgs/awali/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) cmake cython nbconvert ncurses
+ cmake cython nbconvert ncurses | $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/babel/dependencies b/build/pkgs/babel/dependencies
index 41462907c20..802e470da86 100644
--- a/build/pkgs/babel/dependencies
+++ b/build/pkgs/babel/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN) pytz
+ | $(PYTHON_TOOLCHAIN) pytz $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/babel/distros/arch.txt b/build/pkgs/babel/distros/arch.txt
new file mode 100644
index 00000000000..f2d828ccc5c
--- /dev/null
+++ b/build/pkgs/babel/distros/arch.txt
@@ -0,0 +1 @@
+python-babel
diff --git a/build/pkgs/babel/distros/debian.txt b/build/pkgs/babel/distros/debian.txt
new file mode 100644
index 00000000000..e623eb68a31
--- /dev/null
+++ b/build/pkgs/babel/distros/debian.txt
@@ -0,0 +1 @@
+python3-babel
diff --git a/build/pkgs/babel/distros/fedora.txt b/build/pkgs/babel/distros/fedora.txt
new file mode 100644
index 00000000000..98f65931c4c
--- /dev/null
+++ b/build/pkgs/babel/distros/fedora.txt
@@ -0,0 +1 @@
+babel
diff --git a/build/pkgs/babel/distros/gentoo.txt b/build/pkgs/babel/distros/gentoo.txt
new file mode 100644
index 00000000000..2d2c34fb697
--- /dev/null
+++ b/build/pkgs/babel/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/Babel
diff --git a/build/pkgs/babel/distros/opensuse.txt b/build/pkgs/babel/distros/opensuse.txt
index 70bb05b1327..6372e2e77da 100644
--- a/build/pkgs/babel/distros/opensuse.txt
+++ b/build/pkgs/babel/distros/opensuse.txt
@@ -1 +1 @@
-python3-Babel
+python3${PYTHON_MINOR}-Babel
diff --git a/build/pkgs/babel/install-requires.txt b/build/pkgs/babel/install-requires.txt
index 1d0d6191bb9..b4db5e907f3 100644
--- a/build/pkgs/babel/install-requires.txt
+++ b/build/pkgs/babel/install-requires.txt
@@ -1 +1 @@
-babel >=2.6.0
+babel >=2.11.0
diff --git a/build/pkgs/babel/spkg-configure.m4 b/build/pkgs/babel/spkg-configure.m4
new file mode 100644
index 00000000000..d7b9a71c811
--- /dev/null
+++ b/build/pkgs/babel/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([babel], [SAGE_PYTHON_PACKAGE_CHECK([babel])])
diff --git a/build/pkgs/backcall/dependencies b/build/pkgs/backcall/dependencies
index 902a5feed13..4fedbe70cd1 100644
--- a/build/pkgs/backcall/dependencies
+++ b/build/pkgs/backcall/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN) flit_core tomli
+ | $(PYTHON_TOOLCHAIN) flit_core tomli $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/backcall/distros/gentoo.txt b/build/pkgs/backcall/distros/gentoo.txt
new file mode 100644
index 00000000000..266a222c558
--- /dev/null
+++ b/build/pkgs/backcall/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/backcall
diff --git a/build/pkgs/backcall/spkg-configure.m4 b/build/pkgs/backcall/spkg-configure.m4
new file mode 100644
index 00000000000..d6b11c49884
--- /dev/null
+++ b/build/pkgs/backcall/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([backcall], [SAGE_PYTHON_PACKAGE_CHECK([backcall])])
diff --git a/build/pkgs/backports_zoneinfo/SPKG.rst b/build/pkgs/backports_zoneinfo/SPKG.rst
deleted file mode 100644
index deaed349122..00000000000
--- a/build/pkgs/backports_zoneinfo/SPKG.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-backports_zoneinfo: Backport of the standard library zoneinfo module
-====================================================================
-
-Description
------------
-
-Backport of the standard library zoneinfo module for Python 3.8
-
-License
--------
-
-Apache-2.0
-
-Upstream Contact
-----------------
-
-https://pypi.org/project/backports.zoneinfo/
-
diff --git a/build/pkgs/backports_zoneinfo/checksums.ini b/build/pkgs/backports_zoneinfo/checksums.ini
deleted file mode 100644
index 1af2250d48c..00000000000
--- a/build/pkgs/backports_zoneinfo/checksums.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-tarball=backports.zoneinfo-VERSION.tar.gz
-sha1=8015a85e499ceda8b526f907a2a60083f7004aa4
-md5=d51faaaed4a1d5158dcfcef90355e805
-cksum=2001250429
-upstream_url=https://pypi.io/packages/source/b/backports.zoneinfo/backports.zoneinfo-VERSION.tar.gz
diff --git a/build/pkgs/backports_zoneinfo/distros/conda.txt b/build/pkgs/backports_zoneinfo/distros/conda.txt
deleted file mode 100644
index 5a8be642f33..00000000000
--- a/build/pkgs/backports_zoneinfo/distros/conda.txt
+++ /dev/null
@@ -1 +0,0 @@
-backports.zoneinfo
diff --git a/build/pkgs/backports_zoneinfo/install-requires.txt b/build/pkgs/backports_zoneinfo/install-requires.txt
deleted file mode 100644
index 5a8be642f33..00000000000
--- a/build/pkgs/backports_zoneinfo/install-requires.txt
+++ /dev/null
@@ -1 +0,0 @@
-backports.zoneinfo
diff --git a/build/pkgs/backports_zoneinfo/package-version.txt b/build/pkgs/backports_zoneinfo/package-version.txt
deleted file mode 100644
index 0c62199f16a..00000000000
--- a/build/pkgs/backports_zoneinfo/package-version.txt
+++ /dev/null
@@ -1 +0,0 @@
-0.2.1
diff --git a/build/pkgs/backports_zoneinfo/spkg-install.in b/build/pkgs/backports_zoneinfo/spkg-install.in
deleted file mode 100644
index 83aff6031e8..00000000000
--- a/build/pkgs/backports_zoneinfo/spkg-install.in
+++ /dev/null
@@ -1,6 +0,0 @@
-cd src
-if python3 -c 'import sys; sys.exit(0 if sys.hexversion < 0x03090000 else 1)'; then
- sdh_pip_install .
-else
- echo >&2 "Skipping install, not needed for Python >= 3.9"
-fi
diff --git a/build/pkgs/barvinok/SPKG.rst b/build/pkgs/barvinok/SPKG.rst
index 32657de16e4..0e0c5bdf6f5 100644
--- a/build/pkgs/barvinok/SPKG.rst
+++ b/build/pkgs/barvinok/SPKG.rst
@@ -17,4 +17,5 @@ GPL v2
Upstream Contact
----------------
-- http://groups.google.com/group/isl-development
+- https://sourceforge.net/projects/barvinok/
+- https://groups.google.com/group/isl-development
diff --git a/build/pkgs/barvinok/checksums.ini b/build/pkgs/barvinok/checksums.ini
index 1293ccdf34d..fce4148ce81 100644
--- a/build/pkgs/barvinok/checksums.ini
+++ b/build/pkgs/barvinok/checksums.ini
@@ -1,4 +1,5 @@
-tarball=barvinok-VERSION.tar.bz2
-sha1=31c50d4b2a4cebe049072fd54c6e41ccece5ec1d
-md5=60082222a73b2d4fd430da7b770a4072
-cksum=355377045
+tarball=barvinok-VERSION.tar.xz
+sha1=1e17e72732f7e96017d9ae0c3394c3c77c185f2e
+md5=57066c5aa5628b89345c16ed95f93d7e
+cksum=2863920036
+upstream_url=https://sourceforge.net/projects/barvinok/files/barvinok-VERSION.tar.xz
diff --git a/build/pkgs/barvinok/distros/freebsd.txt b/build/pkgs/barvinok/distros/freebsd.txt
new file mode 100644
index 00000000000..811ed3d1f34
--- /dev/null
+++ b/build/pkgs/barvinok/distros/freebsd.txt
@@ -0,0 +1 @@
+math/barvinok
diff --git a/build/pkgs/barvinok/package-version.txt b/build/pkgs/barvinok/package-version.txt
index 9ed317fb462..dbbcc2c2bba 100644
--- a/build/pkgs/barvinok/package-version.txt
+++ b/build/pkgs/barvinok/package-version.txt
@@ -1 +1 @@
-0.41.1
+0.41.7
diff --git a/build/pkgs/beautifulsoup4/dependencies b/build/pkgs/beautifulsoup4/dependencies
index 01af7f65566..c9982dd1882 100644
--- a/build/pkgs/beautifulsoup4/dependencies
+++ b/build/pkgs/beautifulsoup4/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) soupsieve | $(PYTHON_TOOLCHAIN)
+ soupsieve | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/beautifulsoup4/distros/alpine.txt b/build/pkgs/beautifulsoup4/distros/alpine.txt
new file mode 100644
index 00000000000..63787cb8abf
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/alpine.txt
@@ -0,0 +1 @@
+py3-beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/distros/arch.txt b/build/pkgs/beautifulsoup4/distros/arch.txt
new file mode 100644
index 00000000000..2e8f9a66b24
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/arch.txt
@@ -0,0 +1 @@
+python-beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/distros/debian.txt b/build/pkgs/beautifulsoup4/distros/debian.txt
new file mode 100644
index 00000000000..c1f5f713cda
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/debian.txt
@@ -0,0 +1 @@
+beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/distros/fedora.txt b/build/pkgs/beautifulsoup4/distros/fedora.txt
new file mode 100644
index 00000000000..2e8f9a66b24
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/fedora.txt
@@ -0,0 +1 @@
+python-beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/distros/gentoo.txt b/build/pkgs/beautifulsoup4/distros/gentoo.txt
new file mode 100644
index 00000000000..5bdeb6522ec
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/distros/opensuse.txt b/build/pkgs/beautifulsoup4/distros/opensuse.txt
new file mode 100644
index 00000000000..2e8f9a66b24
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/distros/opensuse.txt
@@ -0,0 +1 @@
+python-beautifulsoup4
diff --git a/build/pkgs/beautifulsoup4/spkg-configure.m4 b/build/pkgs/beautifulsoup4/spkg-configure.m4
new file mode 100644
index 00000000000..e9298f00ba4
--- /dev/null
+++ b/build/pkgs/beautifulsoup4/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([beautifulsoup4], [SAGE_PYTHON_PACKAGE_CHECK([beautifulsoup4])])
diff --git a/build/pkgs/beniget/dependencies b/build/pkgs/beniget/dependencies
index d792a85db72..96da9efd5ac 100644
--- a/build/pkgs/beniget/dependencies
+++ b/build/pkgs/beniget/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) gast | $(PYTHON_TOOLCHAIN)
+ gast | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/beniget/distros/gentoo.txt b/build/pkgs/beniget/distros/gentoo.txt
new file mode 100644
index 00000000000..1a5972cd23f
--- /dev/null
+++ b/build/pkgs/beniget/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/beniget
diff --git a/build/pkgs/beniget/spkg-configure.m4 b/build/pkgs/beniget/spkg-configure.m4
new file mode 100644
index 00000000000..8ae6101333e
--- /dev/null
+++ b/build/pkgs/beniget/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([beniget], [SAGE_PYTHON_PACKAGE_CHECK([beniget])])
diff --git a/build/pkgs/bleach/dependencies b/build/pkgs/bleach/dependencies
index 4a74f9bfd68..c7ac2e8b3e7 100644
--- a/build/pkgs/bleach/dependencies
+++ b/build/pkgs/bleach/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) packaging six webencodings | $(PYTHON_TOOLCHAIN)
+ packaging six webencodings | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/bleach/distros/arch.txt b/build/pkgs/bleach/distros/arch.txt
new file mode 100644
index 00000000000..c5422ccff0c
--- /dev/null
+++ b/build/pkgs/bleach/distros/arch.txt
@@ -0,0 +1 @@
+python-bleach
diff --git a/build/pkgs/bleach/distros/debian.txt b/build/pkgs/bleach/distros/debian.txt
new file mode 100644
index 00000000000..6d37c4c79f0
--- /dev/null
+++ b/build/pkgs/bleach/distros/debian.txt
@@ -0,0 +1 @@
+python3-bleach
diff --git a/build/pkgs/bleach/distros/fedora.txt b/build/pkgs/bleach/distros/fedora.txt
new file mode 100644
index 00000000000..c5422ccff0c
--- /dev/null
+++ b/build/pkgs/bleach/distros/fedora.txt
@@ -0,0 +1 @@
+python-bleach
diff --git a/build/pkgs/bleach/distros/gentoo.txt b/build/pkgs/bleach/distros/gentoo.txt
new file mode 100644
index 00000000000..b4f9744eee9
--- /dev/null
+++ b/build/pkgs/bleach/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/bleach
diff --git a/build/pkgs/bleach/distros/opensuse.txt b/build/pkgs/bleach/distros/opensuse.txt
new file mode 100644
index 00000000000..0e329f6dd62
--- /dev/null
+++ b/build/pkgs/bleach/distros/opensuse.txt
@@ -0,0 +1 @@
+python3${PYTHON_MINOR}-bleach
diff --git a/build/pkgs/bleach/spkg-configure.m4 b/build/pkgs/bleach/spkg-configure.m4
new file mode 100644
index 00000000000..3c9bb26bb05
--- /dev/null
+++ b/build/pkgs/bleach/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([bleach], [SAGE_PYTHON_PACKAGE_CHECK([bleach])])
diff --git a/build/pkgs/bliss/SPKG.rst b/build/pkgs/bliss/SPKG.rst
index 875bc39a2df..77407f16ac7 100644
--- a/build/pkgs/bliss/SPKG.rst
+++ b/build/pkgs/bliss/SPKG.rst
@@ -10,17 +10,21 @@ canonical forms of graphs.
License
-------
-LGPL
+LGPL3
Upstream Contact
----------------
-Bliss is currently being maintained by Tommi Junttila and Petteri Kaski.
+Bliss is currently being maintained by Tommi Junttila at
+
+https://users.aalto.fi/~tjunttil/bliss/index.html
+
+Bliss used to be maintained by Tommi Junttila and Petteri Kaski up to version 0.73 at
http://www.tcs.tkk.fi/Software/bliss/index.html
-We apply patches generated from https://github.com/mkoeppe/bliss (branch
-apply_debian_patches) as our upstream. This tracks the patches from the
-Debian package, adding an autotools build system and adjusting the
-include file locations.
+Dependencies
+------------
+
+None
diff --git a/build/pkgs/bliss/checksums.ini b/build/pkgs/bliss/checksums.ini
index e97d89587bf..0c1ebf647bc 100644
--- a/build/pkgs/bliss/checksums.ini
+++ b/build/pkgs/bliss/checksums.ini
@@ -1,4 +1,5 @@
-tarball=bliss-VERSION.tar.gz
-sha1=1da8f098046824fbfff4c64c337e28b2a082f74f
-md5=452aea8737d3c4ad0d8ff39180be8004
-cksum=2193930007
+tarball=bliss-VERSION.zip
+sha1=c91c9dcbc11d66ffbcf6415e09ebe793df37be2a
+md5=5707cbfd9fd00980571c64ab3584c505
+cksum=1626493724
+upstream_url=https://users.aalto.fi/~tjunttil/bliss/downloads/bliss-VERSION.zip
diff --git a/build/pkgs/bliss/dependencies b/build/pkgs/bliss/dependencies
index 4f00de20375..c225c495cc6 100644
--- a/build/pkgs/bliss/dependencies
+++ b/build/pkgs/bliss/dependencies
@@ -1,4 +1,4 @@
-# no dependencies
+| cmake
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/bliss/distros/gentoo.txt b/build/pkgs/bliss/distros/gentoo.txt
new file mode 100644
index 00000000000..73add6de49d
--- /dev/null
+++ b/build/pkgs/bliss/distros/gentoo.txt
@@ -0,0 +1 @@
+sci-libs/bliss
diff --git a/build/pkgs/bliss/package-version.txt b/build/pkgs/bliss/package-version.txt
index e93ee1376fa..9e1e206c410 100644
--- a/build/pkgs/bliss/package-version.txt
+++ b/build/pkgs/bliss/package-version.txt
@@ -1 +1 @@
-0.73+debian-1+sage-2016-08-02.p0
+0.77
diff --git a/build/pkgs/bliss/patches/bliss-0.77-install.patch b/build/pkgs/bliss/patches/bliss-0.77-install.patch
new file mode 100644
index 00000000000..caab14aa40f
--- /dev/null
+++ b/build/pkgs/bliss/patches/bliss-0.77-install.patch
@@ -0,0 +1,32 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 01ed093..cfdb0a6 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -62,3 +62,27 @@ if(USE_GMP)
+ target_link_libraries(bliss-executable ${GMP_LIBRARIES})
+ endif(USE_GMP)
+ set_target_properties(bliss-executable PROPERTIES OUTPUT_NAME bliss)
++
++include(GNUInstallDirs)
++
++set(
++ BLISS_HEADERS
++ src/bliss_C.h
++ src/uintseqhash.hh
++ src/abstractgraph.hh
++ src/stats.hh
++ src/digraph.hh
++ src/defs.hh
++ src/heap.hh
++ src/graph.hh
++ src/partition.hh
++ src/kqueue.hh
++ src/utils.hh
++ src/orbit.hh
++ src/timer.hh
++ src/bignum.hh
++)
++
++install(TARGETS bliss-executable RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
++install(TARGETS bliss LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
++install(FILES ${BLISS_HEADERS} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/bliss)
diff --git a/build/pkgs/bliss/spkg-install.in b/build/pkgs/bliss/spkg-install.in
index aaf4c3037bc..4124a2338e5 100644
--- a/build/pkgs/bliss/spkg-install.in
+++ b/build/pkgs/bliss/spkg-install.in
@@ -1,4 +1,4 @@
cd src
-sdh_configure --disable-gmp
+sdh_cmake -DUSE_GMP=OFF -DCMAKE_VERBOSE_MAKEFILE=ON
sdh_make
sdh_make_install
diff --git a/build/pkgs/bliss/spkg-src b/build/pkgs/bliss/spkg-src
deleted file mode 100755
index 90073233b77..00000000000
--- a/build/pkgs/bliss/spkg-src
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/sh
-#
-# creates the tarball in the current dir, to be moved to ../../../upstream
-#
-# adapted from cliquer/spkg-src
-
-die () {
- echo >&2 "$@"
- exit 1
-}
-
-rm -rf bliss/
-git clone -b sage_package https://github.com/mkoeppe/bliss.git || die "Failed to git clone"
-cd bliss/
-
-VERSION=`autoconf --trace='AC_INIT:$2'`
-libtoolize || die "Failed to autoreconf"
-autoreconf -fi || die "Failed to autoreconf"
-automake --add-missing --copy || die "automake failed"
-./configure || die "configure failed"
-
-rm -f bliss-$VERSION.tar.gz
-make dist || die "make dist failed"
-mv bliss-$VERSION.tar.gz ../
-cd ..
-rm -rf bliss/
-
-
diff --git a/build/pkgs/cachetools/SPKG.rst b/build/pkgs/cachetools/SPKG.rst
new file mode 100644
index 00000000000..30035dfd3cd
--- /dev/null
+++ b/build/pkgs/cachetools/SPKG.rst
@@ -0,0 +1,18 @@
+cachetools: Extensible memoizing collections and decorators
+===========================================================
+
+Description
+-----------
+
+Extensible memoizing collections and decorators
+
+License
+-------
+
+MIT
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/cachetools/
+
diff --git a/build/pkgs/cachetools/checksums.ini b/build/pkgs/cachetools/checksums.ini
new file mode 100644
index 00000000000..0ffe70b3b2f
--- /dev/null
+++ b/build/pkgs/cachetools/checksums.ini
@@ -0,0 +1,5 @@
+tarball=cachetools-VERSION-py3-none-any.whl
+sha1=f7deaa4b10ae6d8955c83b0573e5b80f84e5d87a
+md5=7375eb8031ea2c95b91d2406c29e9379
+cksum=3631496040
+upstream_url=https://pypi.io/packages/py3/c/cachetools/cachetools-VERSION-py3-none-any.whl
diff --git a/build/pkgs/cachetools/dependencies b/build/pkgs/cachetools/dependencies
new file mode 100644
index 00000000000..47296a7bace
--- /dev/null
+++ b/build/pkgs/cachetools/dependencies
@@ -0,0 +1,4 @@
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
+
+----------
+All lines of this file are ignored except the first.
diff --git a/build/pkgs/cachetools/install-requires.txt b/build/pkgs/cachetools/install-requires.txt
new file mode 100644
index 00000000000..50d14084a9b
--- /dev/null
+++ b/build/pkgs/cachetools/install-requires.txt
@@ -0,0 +1 @@
+cachetools
diff --git a/build/pkgs/cachetools/package-version.txt b/build/pkgs/cachetools/package-version.txt
new file mode 100644
index 00000000000..c7cb1311a64
--- /dev/null
+++ b/build/pkgs/cachetools/package-version.txt
@@ -0,0 +1 @@
+5.3.1
diff --git a/build/pkgs/cachetools/spkg-configure.m4 b/build/pkgs/cachetools/spkg-configure.m4
new file mode 100644
index 00000000000..1e6c1fb453a
--- /dev/null
+++ b/build/pkgs/cachetools/spkg-configure.m4
@@ -0,0 +1,7 @@
+SAGE_SPKG_CONFIGURE([cachetools], [
+ sage_spkg_install_cachetools=yes
+ ], [dnl REQUIRED-CHECK
+ AC_REQUIRE([SAGE_SPKG_CONFIGURE_TOX])
+ dnl cachetools is only needed when we cannot use system tox.
+ AS_VAR_SET([SPKG_REQUIRE], [$sage_spkg_install_tox])
+ ])
diff --git a/build/pkgs/flintqs/type b/build/pkgs/cachetools/type
similarity index 100%
rename from build/pkgs/flintqs/type
rename to build/pkgs/cachetools/type
diff --git a/build/pkgs/calver/SPKG.rst b/build/pkgs/calver/SPKG.rst
new file mode 100644
index 00000000000..ccdf38e3719
--- /dev/null
+++ b/build/pkgs/calver/SPKG.rst
@@ -0,0 +1,16 @@
+calver: Setuptools extension for CalVer package versions
+========================================================
+
+Description
+-----------
+
+Setuptools extension for CalVer package versions
+
+License
+-------
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/calver/
+
diff --git a/build/pkgs/calver/checksums.ini b/build/pkgs/calver/checksums.ini
new file mode 100644
index 00000000000..358cbc4cf7b
--- /dev/null
+++ b/build/pkgs/calver/checksums.ini
@@ -0,0 +1,5 @@
+tarball=calver-VERSION-py3-none-any.whl
+sha1=4553e3fbfc58908f3be2dd529e5991986f6a46b5
+md5=3c34037d7bd217efd99b738aa1a7744b
+cksum=3667684754
+upstream_url=https://pypi.io/packages/py3/c/calver/calver-VERSION-py3-none-any.whl
diff --git a/build/pkgs/backports_zoneinfo/dependencies b/build/pkgs/calver/dependencies
similarity index 100%
rename from build/pkgs/backports_zoneinfo/dependencies
rename to build/pkgs/calver/dependencies
diff --git a/build/pkgs/calver/install-requires.txt b/build/pkgs/calver/install-requires.txt
new file mode 100644
index 00000000000..62948b78bc0
--- /dev/null
+++ b/build/pkgs/calver/install-requires.txt
@@ -0,0 +1 @@
+calver
diff --git a/build/pkgs/calver/package-version.txt b/build/pkgs/calver/package-version.txt
new file mode 100644
index 00000000000..42376d1100a
--- /dev/null
+++ b/build/pkgs/calver/package-version.txt
@@ -0,0 +1 @@
+2022.6.26
diff --git a/build/pkgs/html5lib/type b/build/pkgs/calver/type
similarity index 100%
rename from build/pkgs/html5lib/type
rename to build/pkgs/calver/type
diff --git a/build/pkgs/certifi/dependencies b/build/pkgs/certifi/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/certifi/dependencies
+++ b/build/pkgs/certifi/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/certifi/distros/arch.txt b/build/pkgs/certifi/distros/arch.txt
new file mode 100644
index 00000000000..be421c8b4b2
--- /dev/null
+++ b/build/pkgs/certifi/distros/arch.txt
@@ -0,0 +1 @@
+python-certifi
diff --git a/build/pkgs/certifi/distros/debian.txt b/build/pkgs/certifi/distros/debian.txt
new file mode 100644
index 00000000000..f585a823bf3
--- /dev/null
+++ b/build/pkgs/certifi/distros/debian.txt
@@ -0,0 +1 @@
+python3-certifi
diff --git a/build/pkgs/certifi/distros/fedora.txt b/build/pkgs/certifi/distros/fedora.txt
new file mode 100644
index 00000000000..be421c8b4b2
--- /dev/null
+++ b/build/pkgs/certifi/distros/fedora.txt
@@ -0,0 +1 @@
+python-certifi
diff --git a/build/pkgs/certifi/distros/gentoo.txt b/build/pkgs/certifi/distros/gentoo.txt
new file mode 100644
index 00000000000..72e2e91c6ae
--- /dev/null
+++ b/build/pkgs/certifi/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/certifi
diff --git a/build/pkgs/certifi/distros/opensuse.txt b/build/pkgs/certifi/distros/opensuse.txt
index f585a823bf3..9f7a6b5c964 100644
--- a/build/pkgs/certifi/distros/opensuse.txt
+++ b/build/pkgs/certifi/distros/opensuse.txt
@@ -1 +1 @@
-python3-certifi
+python3${PYTHON_MINOR}-certifi
diff --git a/build/pkgs/certifi/spkg-configure.m4 b/build/pkgs/certifi/spkg-configure.m4
new file mode 100644
index 00000000000..ddd40613514
--- /dev/null
+++ b/build/pkgs/certifi/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([certifi], [SAGE_PYTHON_PACKAGE_CHECK([certifi])])
diff --git a/build/pkgs/cffi/dependencies b/build/pkgs/cffi/dependencies
index 9e4c266ad69..9af7c6ed3db 100644
--- a/build/pkgs/cffi/dependencies
+++ b/build/pkgs/cffi/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN) pycparser
+ | $(PYTHON_TOOLCHAIN) pycparser $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cffi/distros/gentoo.txt b/build/pkgs/cffi/distros/gentoo.txt
new file mode 100644
index 00000000000..81eeb8108c7
--- /dev/null
+++ b/build/pkgs/cffi/distros/gentoo.txt
@@ -0,0 +1 @@
+virtual/python-cffi
diff --git a/build/pkgs/cffi/distros/opensuse.txt b/build/pkgs/cffi/distros/opensuse.txt
index 68ec4dda5ba..6bce4cd18b5 100644
--- a/build/pkgs/cffi/distros/opensuse.txt
+++ b/build/pkgs/cffi/distros/opensuse.txt
@@ -1 +1 @@
-python3-cffi
+python3${PYTHON_MINOR}-cffi
diff --git a/build/pkgs/cffi/spkg-configure.m4 b/build/pkgs/cffi/spkg-configure.m4
new file mode 100644
index 00000000000..dc81875927f
--- /dev/null
+++ b/build/pkgs/cffi/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([cffi], [SAGE_PYTHON_PACKAGE_CHECK([cffi])])
diff --git a/build/pkgs/chardet/SPKG.rst b/build/pkgs/chardet/SPKG.rst
new file mode 100644
index 00000000000..6e5a717cca5
--- /dev/null
+++ b/build/pkgs/chardet/SPKG.rst
@@ -0,0 +1,18 @@
+chardet: Universal encoding detector for Python 3
+=================================================
+
+Description
+-----------
+
+Universal encoding detector for Python 3
+
+License
+-------
+
+LGPL
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/chardet/
+
diff --git a/build/pkgs/chardet/checksums.ini b/build/pkgs/chardet/checksums.ini
new file mode 100644
index 00000000000..9911b1d139e
--- /dev/null
+++ b/build/pkgs/chardet/checksums.ini
@@ -0,0 +1,5 @@
+tarball=chardet-VERSION-py3-none-any.whl
+sha1=2facc0387556aa8a2956ef682d49fc3eae56d30a
+md5=b9eda7cd7d1582e269bd8eb7ffc4fcad
+cksum=1563594607
+upstream_url=https://pypi.io/packages/py3/c/chardet/chardet-VERSION-py3-none-any.whl
diff --git a/build/pkgs/chardet/dependencies b/build/pkgs/chardet/dependencies
new file mode 100644
index 00000000000..47296a7bace
--- /dev/null
+++ b/build/pkgs/chardet/dependencies
@@ -0,0 +1,4 @@
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
+
+----------
+All lines of this file are ignored except the first.
diff --git a/build/pkgs/chardet/install-requires.txt b/build/pkgs/chardet/install-requires.txt
new file mode 100644
index 00000000000..79236f25cda
--- /dev/null
+++ b/build/pkgs/chardet/install-requires.txt
@@ -0,0 +1 @@
+chardet
diff --git a/build/pkgs/chardet/package-version.txt b/build/pkgs/chardet/package-version.txt
new file mode 100644
index 00000000000..91ff57278e3
--- /dev/null
+++ b/build/pkgs/chardet/package-version.txt
@@ -0,0 +1 @@
+5.2.0
diff --git a/build/pkgs/chardet/spkg-configure.m4 b/build/pkgs/chardet/spkg-configure.m4
new file mode 100644
index 00000000000..2dba4eef338
--- /dev/null
+++ b/build/pkgs/chardet/spkg-configure.m4
@@ -0,0 +1,7 @@
+SAGE_SPKG_CONFIGURE([chardet], [
+ sage_spkg_install_chardet=yes
+ ], [dnl REQUIRED-CHECK
+ AC_REQUIRE([SAGE_SPKG_CONFIGURE_TOX])
+ dnl chardet is only needed when we cannot use system tox.
+ AS_VAR_SET([SPKG_REQUIRE], [$sage_spkg_install_tox])
+ ])
diff --git a/build/pkgs/jupyter_packaging/type b/build/pkgs/chardet/type
similarity index 100%
rename from build/pkgs/jupyter_packaging/type
rename to build/pkgs/chardet/type
diff --git a/build/pkgs/charset_normalizer/dependencies b/build/pkgs/charset_normalizer/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/charset_normalizer/dependencies
+++ b/build/pkgs/charset_normalizer/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/charset_normalizer/distros/gentoo.txt b/build/pkgs/charset_normalizer/distros/gentoo.txt
new file mode 100644
index 00000000000..eefeb89e31e
--- /dev/null
+++ b/build/pkgs/charset_normalizer/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/charset_normalizer
diff --git a/build/pkgs/charset_normalizer/spkg-configure.m4 b/build/pkgs/charset_normalizer/spkg-configure.m4
new file mode 100644
index 00000000000..18b18cf32b4
--- /dev/null
+++ b/build/pkgs/charset_normalizer/spkg-configure.m4
@@ -0,0 +1,3 @@
+SAGE_SPKG_CONFIGURE([charset_normalizer], [
+ SAGE_PYTHON_PACKAGE_CHECK([charset_normalizer])
+])
diff --git a/build/pkgs/cmake/checksums.ini b/build/pkgs/cmake/checksums.ini
index c89abdf4277..a145b30dfb4 100644
--- a/build/pkgs/cmake/checksums.ini
+++ b/build/pkgs/cmake/checksums.ini
@@ -1,5 +1,5 @@
tarball=cmake-VERSION.tar.gz
-sha1=256d6a57a57fa6ceaacd6a2daf708baefd33850c
-md5=226dd564164372f9f7d1e21e38e6e8c5
-cksum=2080281918
+sha1=3e9b980bfb16974f57ca02b5e2b403a2ef2d4eca
+md5=7228f5fcc8a858fdeac27e29bda0c144
+cksum=2027526722
upstream_url=https://github.com/Kitware/CMake/releases/download/vVERSION/cmake-VERSION.tar.gz
diff --git a/build/pkgs/cmake/package-version.txt b/build/pkgs/cmake/package-version.txt
index 693bd59e3e6..a155471fc06 100644
--- a/build/pkgs/cmake/package-version.txt
+++ b/build/pkgs/cmake/package-version.txt
@@ -1 +1 @@
-3.24.3
+3.27.3
diff --git a/build/pkgs/colorama/SPKG.rst b/build/pkgs/colorama/SPKG.rst
new file mode 100644
index 00000000000..3335092e4c7
--- /dev/null
+++ b/build/pkgs/colorama/SPKG.rst
@@ -0,0 +1,16 @@
+colorama: Cross-platform colored terminal text.
+===============================================
+
+Description
+-----------
+
+Cross-platform colored terminal text.
+
+License
+-------
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/colorama/
+
diff --git a/build/pkgs/colorama/checksums.ini b/build/pkgs/colorama/checksums.ini
new file mode 100644
index 00000000000..e625d548a68
--- /dev/null
+++ b/build/pkgs/colorama/checksums.ini
@@ -0,0 +1,5 @@
+tarball=colorama-VERSION-py2.py3-none-any.whl
+sha1=d6ab1608850fecfc0e1cf50bf93d743695c04027
+md5=3fc7a89530d68d7ea231ebe779c0db9c
+cksum=3297334831
+upstream_url=https://pypi.io/packages/py2.py3/c/colorama/colorama-VERSION-py2.py3-none-any.whl
diff --git a/build/pkgs/colorama/dependencies b/build/pkgs/colorama/dependencies
new file mode 100644
index 00000000000..47296a7bace
--- /dev/null
+++ b/build/pkgs/colorama/dependencies
@@ -0,0 +1,4 @@
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
+
+----------
+All lines of this file are ignored except the first.
diff --git a/build/pkgs/colorama/install-requires.txt b/build/pkgs/colorama/install-requires.txt
new file mode 100644
index 00000000000..3fcfb51b2ad
--- /dev/null
+++ b/build/pkgs/colorama/install-requires.txt
@@ -0,0 +1 @@
+colorama
diff --git a/build/pkgs/colorama/package-version.txt b/build/pkgs/colorama/package-version.txt
new file mode 100644
index 00000000000..ef52a648073
--- /dev/null
+++ b/build/pkgs/colorama/package-version.txt
@@ -0,0 +1 @@
+0.4.6
diff --git a/build/pkgs/colorama/spkg-configure.m4 b/build/pkgs/colorama/spkg-configure.m4
new file mode 100644
index 00000000000..65c88b05ec8
--- /dev/null
+++ b/build/pkgs/colorama/spkg-configure.m4
@@ -0,0 +1,7 @@
+SAGE_SPKG_CONFIGURE([colorama], [
+ sage_spkg_install_colorama=yes
+ ], [dnl REQUIRED-CHECK
+ AC_REQUIRE([SAGE_SPKG_CONFIGURE_TOX])
+ dnl colorama is only needed when we cannot use system tox.
+ AS_VAR_SET([SPKG_REQUIRE], [$sage_spkg_install_tox])
+ ])
diff --git a/build/pkgs/pcre/type b/build/pkgs/colorama/type
similarity index 100%
rename from build/pkgs/pcre/type
rename to build/pkgs/colorama/type
diff --git a/build/pkgs/configure/checksums.ini b/build/pkgs/configure/checksums.ini
index 31e3b035d64..f6feef844de 100644
--- a/build/pkgs/configure/checksums.ini
+++ b/build/pkgs/configure/checksums.ini
@@ -1,4 +1,4 @@
tarball=configure-VERSION.tar.gz
-sha1=0897c667d1327d2a51ea3d3bd1d9e1a3f5ca2606
-md5=a1f271e5ffcf558d054028839296a072
-cksum=437557471
+sha1=96468a2d2ec8ee319095f3d2abd73e5f1ec7829d
+md5=87391217b5c82275e1cb581721877eec
+cksum=370856230
diff --git a/build/pkgs/configure/package-version.txt b/build/pkgs/configure/package-version.txt
index 9462f03cbd2..886c67921da 100644
--- a/build/pkgs/configure/package-version.txt
+++ b/build/pkgs/configure/package-version.txt
@@ -1 +1 @@
-08185d786047228caff879eba88a1f8148a49020
+b01856309bcb0d25e9cf830da19fa1cdd24df2bf
diff --git a/build/pkgs/contourpy/dependencies b/build/pkgs/contourpy/dependencies
index 0740ab1d4a7..d12b50bf33c 100644
--- a/build/pkgs/contourpy/dependencies
+++ b/build/pkgs/contourpy/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) numpy | $(PYTHON_TOOLCHAIN) pybind11
+ numpy | $(PYTHON_TOOLCHAIN) pybind11 $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/contourpy/distros/gentoo.txt b/build/pkgs/contourpy/distros/gentoo.txt
new file mode 100644
index 00000000000..39774cf783f
--- /dev/null
+++ b/build/pkgs/contourpy/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/contourpy
diff --git a/build/pkgs/contourpy/spkg-configure.m4 b/build/pkgs/contourpy/spkg-configure.m4
new file mode 100644
index 00000000000..f26adf351de
--- /dev/null
+++ b/build/pkgs/contourpy/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([contourpy], [SAGE_PYTHON_PACKAGE_CHECK([contourpy])])
diff --git a/build/pkgs/conway_polynomials/dependencies b/build/pkgs/conway_polynomials/dependencies
index 1700e743d59..6b134137610 100644
--- a/build/pkgs/conway_polynomials/dependencies
+++ b/build/pkgs/conway_polynomials/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON)
+| $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cppy/dependencies b/build/pkgs/cppy/dependencies
index 0738c2d7777..47296a7bace 100644
--- a/build/pkgs/cppy/dependencies
+++ b/build/pkgs/cppy/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cppy/distros/gentoo.txt b/build/pkgs/cppy/distros/gentoo.txt
new file mode 100644
index 00000000000..f66c6eff5ee
--- /dev/null
+++ b/build/pkgs/cppy/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/cppy
diff --git a/build/pkgs/cppy/install-requires.txt b/build/pkgs/cppy/install-requires.txt
index 9d2b4aaeee0..42667a30148 100644
--- a/build/pkgs/cppy/install-requires.txt
+++ b/build/pkgs/cppy/install-requires.txt
@@ -1 +1 @@
-cppy
+cppy >=1.2.0
diff --git a/build/pkgs/cppy/spkg-configure.m4 b/build/pkgs/cppy/spkg-configure.m4
new file mode 100644
index 00000000000..2c895d9b070
--- /dev/null
+++ b/build/pkgs/cppy/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([cppy], [SAGE_PYTHON_PACKAGE_CHECK([cppy])])
diff --git a/build/pkgs/cryptominisat/dependencies b/build/pkgs/cryptominisat/dependencies
index 15e88888b6d..e30473e40f6 100644
--- a/build/pkgs/cryptominisat/dependencies
+++ b/build/pkgs/cryptominisat/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) m4ri zlib libpng | cmake boost_cropped
+ m4ri zlib libpng | cmake boost_cropped $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cvxopt/dependencies b/build/pkgs/cvxopt/dependencies
index d47ae01f215..33055fe8bf4 100644
--- a/build/pkgs/cvxopt/dependencies
+++ b/build/pkgs/cvxopt/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) numpy $(BLAS) gsl glpk suitesparse | $(PYTHON_TOOLCHAIN) pkgconfig
+ numpy $(BLAS) gsl glpk suitesparse | $(PYTHON_TOOLCHAIN) pkgconfig $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cvxopt/distros/arch.txt b/build/pkgs/cvxopt/distros/arch.txt
new file mode 100644
index 00000000000..f15770f0506
--- /dev/null
+++ b/build/pkgs/cvxopt/distros/arch.txt
@@ -0,0 +1 @@
+python-cvxopt
diff --git a/build/pkgs/cvxopt/distros/debian.txt b/build/pkgs/cvxopt/distros/debian.txt
new file mode 100644
index 00000000000..2bb6ad1e834
--- /dev/null
+++ b/build/pkgs/cvxopt/distros/debian.txt
@@ -0,0 +1 @@
+python3-cvxopt
diff --git a/build/pkgs/cvxopt/distros/fedora.txt b/build/pkgs/cvxopt/distros/fedora.txt
new file mode 100644
index 00000000000..f15770f0506
--- /dev/null
+++ b/build/pkgs/cvxopt/distros/fedora.txt
@@ -0,0 +1 @@
+python-cvxopt
diff --git a/build/pkgs/cvxopt/distros/gentoo.txt b/build/pkgs/cvxopt/distros/gentoo.txt
new file mode 100644
index 00000000000..b3123912bbe
--- /dev/null
+++ b/build/pkgs/cvxopt/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/cvxopt
diff --git a/build/pkgs/cvxopt/distros/opensuse.txt b/build/pkgs/cvxopt/distros/opensuse.txt
new file mode 100644
index 00000000000..e254c198706
--- /dev/null
+++ b/build/pkgs/cvxopt/distros/opensuse.txt
@@ -0,0 +1 @@
+python3${PYTHON_MINOR}-cvxopt
diff --git a/build/pkgs/cvxopt/spkg-configure.m4 b/build/pkgs/cvxopt/spkg-configure.m4
new file mode 100644
index 00000000000..c4aa6198edb
--- /dev/null
+++ b/build/pkgs/cvxopt/spkg-configure.m4
@@ -0,0 +1,5 @@
+SAGE_SPKG_CONFIGURE([cvxopt], [
+ SAGE_SPKG_DEPCHECK([gsl glpk suitesparse], [
+ SAGE_PYTHON_PACKAGE_CHECK([cvxopt])
+ ])
+])
diff --git a/build/pkgs/cvxpy/SPKG.rst b/build/pkgs/cvxpy/SPKG.rst
new file mode 100644
index 00000000000..55998a0d419
--- /dev/null
+++ b/build/pkgs/cvxpy/SPKG.rst
@@ -0,0 +1,18 @@
+cvxpy: A domain-specific language for modeling convex optimization problems in Python.
+======================================================================================
+
+Description
+-----------
+
+A domain-specific language for modeling convex optimization problems in Python.
+
+License
+-------
+
+Apache License, Version 2.0
+
+Upstream Contact
+----------------
+
+https://pypi.org/project/cvxpy/
+
diff --git a/build/pkgs/cvxpy/checksums.ini b/build/pkgs/cvxpy/checksums.ini
new file mode 100644
index 00000000000..128dcda1602
--- /dev/null
+++ b/build/pkgs/cvxpy/checksums.ini
@@ -0,0 +1,5 @@
+tarball=cvxpy-VERSION.tar.gz
+sha1=8c87f8f8c2177f917ec2fad7d2b510787ffdf72d
+md5=408b0a3140750299207f61de95b4ed6e
+cksum=3643150234
+upstream_url=https://pypi.io/packages/source/c/cvxpy/cvxpy-VERSION.tar.gz
diff --git a/build/pkgs/cvxpy/dependencies b/build/pkgs/cvxpy/dependencies
new file mode 100644
index 00000000000..42cfab890cc
--- /dev/null
+++ b/build/pkgs/cvxpy/dependencies
@@ -0,0 +1,4 @@
+ numpy scipy glpk cvxopt osqp_python ecos_python scs | $(PYTHON_TOOLCHAIN) $(PYTHON)
+
+----------
+All lines of this file are ignored except the first.
diff --git a/build/pkgs/cvxpy/distros/conda.txt b/build/pkgs/cvxpy/distros/conda.txt
new file mode 100644
index 00000000000..187142bb93e
--- /dev/null
+++ b/build/pkgs/cvxpy/distros/conda.txt
@@ -0,0 +1 @@
+cvxpy
diff --git a/build/pkgs/cvxpy/install-requires.txt b/build/pkgs/cvxpy/install-requires.txt
new file mode 100644
index 00000000000..187142bb93e
--- /dev/null
+++ b/build/pkgs/cvxpy/install-requires.txt
@@ -0,0 +1 @@
+cvxpy
diff --git a/build/pkgs/cvxpy/package-version.txt b/build/pkgs/cvxpy/package-version.txt
new file mode 100644
index 00000000000..f0bb29e7638
--- /dev/null
+++ b/build/pkgs/cvxpy/package-version.txt
@@ -0,0 +1 @@
+1.3.0
diff --git a/build/pkgs/cvxpy/spkg-install.in b/build/pkgs/cvxpy/spkg-install.in
new file mode 100644
index 00000000000..a143d1eff96
--- /dev/null
+++ b/build/pkgs/cvxpy/spkg-install.in
@@ -0,0 +1,3 @@
+cd src
+# --no-build-isolation to ignore the numpy version pin in pyproject.toml
+sdh_pip_install --no-build-isolation .
diff --git a/build/pkgs/pyflakes/type b/build/pkgs/cvxpy/type
similarity index 100%
rename from build/pkgs/pyflakes/type
rename to build/pkgs/cvxpy/type
diff --git a/build/pkgs/cycler/dependencies b/build/pkgs/cycler/dependencies
index 730af09b339..8a158d645be 100644
--- a/build/pkgs/cycler/dependencies
+++ b/build/pkgs/cycler/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) six | $(PYTHON_TOOLCHAIN)
+ | $(PYTHON_TOOLCHAIN) $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cycler/distros/arch.txt b/build/pkgs/cycler/distros/arch.txt
new file mode 100644
index 00000000000..5727259aca6
--- /dev/null
+++ b/build/pkgs/cycler/distros/arch.txt
@@ -0,0 +1 @@
+python-cycler
diff --git a/build/pkgs/cycler/distros/debian.txt b/build/pkgs/cycler/distros/debian.txt
new file mode 100644
index 00000000000..c77685dd417
--- /dev/null
+++ b/build/pkgs/cycler/distros/debian.txt
@@ -0,0 +1 @@
+python3-cycler
diff --git a/build/pkgs/cycler/distros/fedora.txt b/build/pkgs/cycler/distros/fedora.txt
new file mode 100644
index 00000000000..5727259aca6
--- /dev/null
+++ b/build/pkgs/cycler/distros/fedora.txt
@@ -0,0 +1 @@
+python-cycler
diff --git a/build/pkgs/cycler/distros/freebsd.txt b/build/pkgs/cycler/distros/freebsd.txt
new file mode 100644
index 00000000000..6da1dabb333
--- /dev/null
+++ b/build/pkgs/cycler/distros/freebsd.txt
@@ -0,0 +1 @@
+devel/py-cycler
diff --git a/build/pkgs/cycler/distros/gentoo.txt b/build/pkgs/cycler/distros/gentoo.txt
new file mode 100644
index 00000000000..4b215438e8f
--- /dev/null
+++ b/build/pkgs/cycler/distros/gentoo.txt
@@ -0,0 +1 @@
+dev-python/cycler
diff --git a/build/pkgs/cycler/distros/opensuse.txt b/build/pkgs/cycler/distros/opensuse.txt
new file mode 100644
index 00000000000..6aab7454acb
--- /dev/null
+++ b/build/pkgs/cycler/distros/opensuse.txt
@@ -0,0 +1 @@
+python3${PYTHON_MINOR}-Cycler
diff --git a/build/pkgs/cycler/spkg-configure.m4 b/build/pkgs/cycler/spkg-configure.m4
new file mode 100644
index 00000000000..239571a0f10
--- /dev/null
+++ b/build/pkgs/cycler/spkg-configure.m4
@@ -0,0 +1 @@
+SAGE_SPKG_CONFIGURE([cycler], [SAGE_PYTHON_PACKAGE_CHECK([cycler])])
diff --git a/build/pkgs/cylp/SPKG.rst b/build/pkgs/cylp/SPKG.rst
index 1bb0c61738d..10b8192e39c 100644
--- a/build/pkgs/cylp/SPKG.rst
+++ b/build/pkgs/cylp/SPKG.rst
@@ -9,7 +9,11 @@ A Python interface for CLP, CBC, and CGL
License
-------
-Eclipse Public License
+Eclipse Public License (EPL) version 2 (without a Secondary Licenses Notice).
+
+Note: This license is incompatible with the GPL according to
+https://www.gnu.org/licenses/license-list.html#EPL2;
+see also the discussion in :trac:`26511`.
Upstream Contact
----------------
diff --git a/build/pkgs/cylp/checksums.ini b/build/pkgs/cylp/checksums.ini
index 1b44a7d5faa..0a073c1569a 100644
--- a/build/pkgs/cylp/checksums.ini
+++ b/build/pkgs/cylp/checksums.ini
@@ -1,5 +1,5 @@
tarball=cylp-VERSION.tar.gz
-sha1=54965f2ae9b914df7817dffd53bc34925a6fadd4
-md5=a4f50e6b24a7fcd2e890a9e7e8825437
-cksum=4132703858
+sha1=1c2d20933abc48ed2fefc1ae45d8f9492fc2eef2
+md5=ac0308a916dac5dd84f831dbc0fba5c5
+cksum=1532166313
upstream_url=https://pypi.io/packages/source/c/cylp/cylp-VERSION.tar.gz
diff --git a/build/pkgs/cylp/dependencies b/build/pkgs/cylp/dependencies
index 3c541129eb9..d2c6405119d 100644
--- a/build/pkgs/cylp/dependencies
+++ b/build/pkgs/cylp/dependencies
@@ -1,4 +1,4 @@
-$(PYTHON) numpy scipy cbc | $(PYTHON_TOOLCHAIN) cython
+ numpy scipy cbc | $(PYTHON_TOOLCHAIN) cython $(PYTHON)
----------
All lines of this file are ignored except the first.
diff --git a/build/pkgs/cylp/package-version.txt b/build/pkgs/cylp/package-version.txt
index ad7e0bcae92..1d5e6c02bae 100644
--- a/build/pkgs/cylp/package-version.txt
+++ b/build/pkgs/cylp/package-version.txt
@@ -1 +1 @@
-0.91.4
+0.91.5
diff --git a/build/pkgs/cylp/patches/e619c4b94e279e96842da0d38ae657f06f1e9415.patch b/build/pkgs/cylp/patches/e619c4b94e279e96842da0d38ae657f06f1e9415.patch
deleted file mode 100644
index 295cae02b2c..00000000000
--- a/build/pkgs/cylp/patches/e619c4b94e279e96842da0d38ae657f06f1e9415.patch
+++ /dev/null
@@ -1,88285 +0,0 @@
-From e619c4b94e279e96842da0d38ae657f06f1e9415 Mon Sep 17 00:00:00 2001
-From: Ted Ralphs
-Date: Wed, 15 Dec 2021 18:01:49 -0500
-Subject: [PATCH] Re-generating with Cython 0.29.27 for Python 3.10
- compatibility. Fixes #132
-
----
- cylp/cy/CyCbcModel.cpp | 3071 ++++++--------------
- cylp/cy/CyCbcNode.cpp | 3671 +++++++-----------------
- cylp/cy/CyCgl.cpp | 3248 +++++++--------------
- cylp/cy/CyCglCutGeneratorBase.cpp | 3652 +++++++----------------
- cylp/cy/CyCglTreeInfo.cpp | 355 ++-
- cylp/cy/CyClpDualRowPivotBase.cpp | 3623 +++++++----------------
- cylp/cy/CyClpPrimalColumnPivotBase.cpp | 3652 +++++++----------------
- cylp/cy/CyClpSimplex.cpp | 2852 +++++-------------
- cylp/cy/CyCoinIndexedVector.cpp | 470 ++-
- cylp/cy/CyCoinModel.cpp | 3055 +++++---------------
- cylp/cy/CyCoinMpsIO.cpp | 2732 +++++-------------
- cylp/cy/CyCoinPackedMatrix.cpp | 3026 +++++--------------
- cylp/cy/CyCutGeneratorPythonBase.cpp | 2994 +++++--------------
- cylp/cy/CyDantzigPivot.cpp | 3217 ++++++---------------
- cylp/cy/CyDualPivotPythonBase.cpp | 3018 ++++++-------------
- cylp/cy/CyOsiCuts.cpp | 3040 ++++++--------------
- cylp/cy/CyOsiSolverInterface.cpp | 3037 +++++---------------
- cylp/cy/CyPEPivot.cpp | 3214 ++++++---------------
- cylp/cy/CyPivotPythonBase.cpp | 3174 ++++++--------------
- cylp/cy/CyTest.cpp | 3588 +++++++----------------
- cylp/cy/CyWolfePivot.cpp | 3292 +++++++--------------
- 21 files changed, 17581 insertions(+), 44400 deletions(-)
-
-diff --git a/cylp/cy/CyCbcModel.cpp b/cylp/cy/CyCbcModel.cpp
-index 14b5c2a..c62fd3b 100644
---- a/cylp/cy/CyCbcModel.cpp
-+++ b/cylp/cy/CyCbcModel.cpp
-@@ -1,14 +1,16 @@
--/* Generated by Cython 0.29.21 */
-+/* Generated by Cython 0.29.25 */
-
-+#ifndef PY_SSIZE_T_CLEAN
- #define PY_SSIZE_T_CLEAN
-+#endif /* PY_SSIZE_T_CLEAN */
- #include "Python.h"
- #ifndef Py_PYTHON_H
- #error Python headers needed to compile C extensions, please install development version of Python.
- #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
- #error Cython requires Python 2.6+ or Python 3.3+.
- #else
--#define CYTHON_ABI "0_29_21"
--#define CYTHON_HEX_VERSION 0x001D15F0
-+#define CYTHON_ABI "0_29_25"
-+#define CYTHON_HEX_VERSION 0x001D19F0
- #define CYTHON_FUTURE_DIVISION 0
- #include <stddef.h>
- #ifndef offsetof
-@@ -155,7 +157,7 @@
- #ifndef CYTHON_USE_UNICODE_INTERNALS
- #define CYTHON_USE_UNICODE_INTERNALS 1
- #endif
-- #if PY_VERSION_HEX < 0x030300F0
-+ #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
- #undef CYTHON_USE_UNICODE_WRITER
- #define CYTHON_USE_UNICODE_WRITER 0
- #elif !defined(CYTHON_USE_UNICODE_WRITER)
-@@ -174,7 +176,7 @@
- #define CYTHON_FAST_THREAD_STATE 1
- #endif
- #ifndef CYTHON_FAST_PYCALL
-- #define CYTHON_FAST_PYCALL 1
-+ #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1)
- #endif
- #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
- #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
-@@ -193,7 +195,9 @@
- #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
- #endif
- #if CYTHON_USE_PYLONG_INTERNALS
-- #include "longintrepr.h"
-+ #if PY_MAJOR_VERSION < 3
-+ #include "longintrepr.h"
-+ #endif
- #undef SHIFT
- #undef BASE
- #undef MASK
-@@ -324,9 +328,68 @@ class __Pyx_FakeReference {
- #define __Pyx_DefaultClassType PyClass_Type
- #else
- #define __Pyx_BUILTIN_MODULE_NAME "builtins"
--#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
-- #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
-- PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
-+ #define __Pyx_DefaultClassType PyType_Type
-+#if PY_VERSION_HEX >= 0x030B00A1
-+ static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f,
-+ PyObject *code, PyObject *c, PyObject* n, PyObject *v,
-+ PyObject *fv, PyObject *cell, PyObject* fn,
-+ PyObject *name, int fline, PyObject *lnos) {
-+ PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL;
-+ PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL;
-+ const char *fn_cstr=NULL;
-+ const char *name_cstr=NULL;
-+ PyCodeObject* co=NULL;
-+ PyObject *type, *value, *traceback;
-+ PyErr_Fetch(&type, &value, &traceback);
-+ if (!(kwds=PyDict_New())) goto end;
-+ if (!(argcount=PyLong_FromLong(a))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end;
-+ if (!(posonlyargcount=PyLong_FromLong(0))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end;
-+ if (!(kwonlyargcount=PyLong_FromLong(k))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end;
-+ if (!(nlocals=PyLong_FromLong(l))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end;
-+ if (!(stacksize=PyLong_FromLong(s))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end;
-+ if (!(flags=PyLong_FromLong(f))) goto end;
-+ if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end;
-+ if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end;
-+ if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end;
-+ if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end;
-+ if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end;
-+ if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too;
-+ if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here
-+ if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too;
-+ Py_XDECREF((PyObject*)co);
-+ co = (PyCodeObject*)call_result;
-+ call_result = NULL;
-+ if (0) {
-+ cleanup_code_too:
-+ Py_XDECREF((PyObject*)co);
-+ co = NULL;
-+ }
-+ end:
-+ Py_XDECREF(kwds);
-+ Py_XDECREF(argcount);
-+ Py_XDECREF(posonlyargcount);
-+ Py_XDECREF(kwonlyargcount);
-+ Py_XDECREF(nlocals);
-+ Py_XDECREF(stacksize);
-+ Py_XDECREF(replace);
-+ Py_XDECREF(call_result);
-+ Py_XDECREF(empty);
-+ if (type) {
-+ PyErr_Restore(type, value, traceback);
-+ }
-+ return co;
-+ }
- #else
- #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
- PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
-@@ -440,8 +503,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
- #endif
- #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
- #define CYTHON_PEP393_ENABLED 1
-+ #if defined(PyUnicode_IS_READY)
- #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
- 0 : _PyUnicode_Ready((PyObject *)(op)))
-+ #else
-+ #define __Pyx_PyUnicode_READY(op) (0)
-+ #endif
- #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
- #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
- #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
-@@ -450,7 +517,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
- #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
- #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
- #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
-+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
-+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
-+ #else
- #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
-+ #endif
- #else
- #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
- #endif
-@@ -556,10 +627,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
- #if PY_VERSION_HEX < 0x030200A4
- typedef long Py_hash_t;
- #define __Pyx_PyInt_FromHash_t PyInt_FromLong
-- #define __Pyx_PyInt_AsHash_t PyInt_AsLong
-+ #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
- #else
- #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
-- #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
-+ #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
- #endif
- #if PY_MAJOR_VERSION >= 3
- #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
-@@ -622,7 +693,13 @@ static CYTHON_INLINE float __PYX_NAN() {
- #include
- #include
- #include "numpy/arrayobject.h"
-+#include "numpy/ndarrayobject.h"
-+#include "numpy/ndarraytypes.h"
-+#include "numpy/arrayscalars.h"
- #include "numpy/ufuncobject.h"
-+
-+ /* NumPy API declarations from "numpy/__init__.pxd" */
-+
- #include "CglAllDifferent.hpp"
- #include "CglClique.hpp"
- #include "CglKnapsackCover.hpp"
-@@ -650,11 +727,11 @@ static CYTHON_INLINE float __PYX_NAN() {
- #include "IClpDualRowPivotBase.h"
- #include "CoinModel.hpp"
- #include "ICoinPackedMatrix.hpp"
-+#include
- #include "ios"
- #include "new"
- #include "stdexcept"
- #include "typeinfo"
--#include
- #include
- #include "IClpSimplex.hpp"
- #include "ClpSimplex.hpp"
-@@ -761,6 +838,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
- (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
- static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
- static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
-+static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
- #if CYTHON_ASSUME_SAFE_MACROS
- #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
- #else
-@@ -916,7 +994,7 @@ static const char *__pyx_f[] = {
- "cylp/cy/CyCutGeneratorPythonBase.pxd",
- };
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":775
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":690
- * # in Cython to enable them only on the right systems.
- *
- * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<<
-@@ -925,7 +1003,7 @@ static const char *__pyx_f[] = {
- */
- typedef npy_int8 __pyx_t_5numpy_int8_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":776
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":691
- *
- * ctypedef npy_int8 int8_t
- * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<<
-@@ -934,7 +1012,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t;
- */
- typedef npy_int16 __pyx_t_5numpy_int16_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":777
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":692
- * ctypedef npy_int8 int8_t
- * ctypedef npy_int16 int16_t
- * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<<
-@@ -943,7 +1021,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t;
- */
- typedef npy_int32 __pyx_t_5numpy_int32_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":778
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":693
- * ctypedef npy_int16 int16_t
- * ctypedef npy_int32 int32_t
- * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<<
-@@ -952,7 +1030,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t;
- */
- typedef npy_int64 __pyx_t_5numpy_int64_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":782
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":697
- * #ctypedef npy_int128 int128_t
- *
- * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<<
-@@ -961,7 +1039,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t;
- */
- typedef npy_uint8 __pyx_t_5numpy_uint8_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":783
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":698
- *
- * ctypedef npy_uint8 uint8_t
- * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<<
-@@ -970,7 +1048,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t;
- */
- typedef npy_uint16 __pyx_t_5numpy_uint16_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":784
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":699
- * ctypedef npy_uint8 uint8_t
- * ctypedef npy_uint16 uint16_t
- * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<<
-@@ -979,7 +1057,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t;
- */
- typedef npy_uint32 __pyx_t_5numpy_uint32_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":785
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":700
- * ctypedef npy_uint16 uint16_t
- * ctypedef npy_uint32 uint32_t
- * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<<
-@@ -988,7 +1066,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t;
- */
- typedef npy_uint64 __pyx_t_5numpy_uint64_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":789
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":704
- * #ctypedef npy_uint128 uint128_t
- *
- * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<<
-@@ -997,7 +1075,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t;
- */
- typedef npy_float32 __pyx_t_5numpy_float32_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":790
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":705
- *
- * ctypedef npy_float32 float32_t
- * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<<
-@@ -1006,7 +1084,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t;
- */
- typedef npy_float64 __pyx_t_5numpy_float64_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":799
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":714
- * # The int types are mapped a bit surprising --
- * # numpy.int corresponds to 'l' and numpy.long to 'q'
- * ctypedef npy_long int_t # <<<<<<<<<<<<<<
-@@ -1015,7 +1093,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t;
- */
- typedef npy_long __pyx_t_5numpy_int_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":800
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":715
- * # numpy.int corresponds to 'l' and numpy.long to 'q'
- * ctypedef npy_long int_t
- * ctypedef npy_longlong long_t # <<<<<<<<<<<<<<
-@@ -1024,7 +1102,7 @@ typedef npy_long __pyx_t_5numpy_int_t;
- */
- typedef npy_longlong __pyx_t_5numpy_long_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":801
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":716
- * ctypedef npy_long int_t
- * ctypedef npy_longlong long_t
- * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<<
-@@ -1033,7 +1111,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t;
- */
- typedef npy_longlong __pyx_t_5numpy_longlong_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":803
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":718
- * ctypedef npy_longlong longlong_t
- *
- * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<<
-@@ -1042,7 +1120,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t;
- */
- typedef npy_ulong __pyx_t_5numpy_uint_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":804
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":719
- *
- * ctypedef npy_ulong uint_t
- * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<<
-@@ -1051,7 +1129,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t;
- */
- typedef npy_ulonglong __pyx_t_5numpy_ulong_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":805
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":720
- * ctypedef npy_ulong uint_t
- * ctypedef npy_ulonglong ulong_t
- * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<<
-@@ -1060,7 +1138,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t;
- */
- typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":807
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":722
- * ctypedef npy_ulonglong ulonglong_t
- *
- * ctypedef npy_intp intp_t # <<<<<<<<<<<<<<
-@@ -1069,7 +1147,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t;
- */
- typedef npy_intp __pyx_t_5numpy_intp_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":808
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":723
- *
- * ctypedef npy_intp intp_t
- * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<<
-@@ -1078,7 +1156,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t;
- */
- typedef npy_uintp __pyx_t_5numpy_uintp_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":810
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":725
- * ctypedef npy_uintp uintp_t
- *
- * ctypedef npy_double float_t # <<<<<<<<<<<<<<
-@@ -1087,7 +1165,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t;
- */
- typedef npy_double __pyx_t_5numpy_float_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":811
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":726
- *
- * ctypedef npy_double float_t
- * ctypedef npy_double double_t # <<<<<<<<<<<<<<
-@@ -1096,7 +1174,7 @@ typedef npy_double __pyx_t_5numpy_float_t;
- */
- typedef npy_double __pyx_t_5numpy_double_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":812
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":727
- * ctypedef npy_double float_t
- * ctypedef npy_double double_t
- * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<<
-@@ -1162,7 +1240,7 @@ struct __pyx_obj_4cylp_2cy_21CyCglCutGeneratorBase_CyCglCutGeneratorBase;
- struct __pyx_obj_4cylp_2cy_24CyCutGeneratorPythonBase_CyCutGeneratorPythonBase;
- struct __pyx_obj_4cylp_2cy_10CyCbcModel_CyCbcModel;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":814
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":729
- * ctypedef npy_longdouble longdouble_t
- *
- * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<<
-@@ -1171,7 +1249,7 @@ struct __pyx_obj_4cylp_2cy_10CyCbcModel_CyCbcModel;
- */
- typedef npy_cfloat __pyx_t_5numpy_cfloat_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":815
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":730
- *
- * ctypedef npy_cfloat cfloat_t
- * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<<
-@@ -1180,7 +1258,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t;
- */
- typedef npy_cdouble __pyx_t_5numpy_cdouble_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":816
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":731
- * ctypedef npy_cfloat cfloat_t
- * ctypedef npy_cdouble cdouble_t
- * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<<
-@@ -1189,7 +1267,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t;
- */
- typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t;
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":818
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":733
- * ctypedef npy_clongdouble clongdouble_t
- *
- * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<<
-@@ -1970,6 +2048,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
- #ifndef Py_MEMBER_SIZE
- #define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
- #endif
-+#if CYTHON_FAST_PYCALL
- static size_t __pyx_pyframe_localsplus_offset = 0;
- #include "frameobject.h"
- #define __Pxy_PyFrame_Initialize_Offsets()\
-@@ -1977,6 +2056,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
- (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
- #define __Pyx_PyFrame_GetLocalsplus(frame)\
- (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
-+#endif // CYTHON_FAST_PYCALL
- #endif
-
- /* PyObjectCall.proto */
-@@ -2192,29 +2272,6 @@ static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {
- #define __Pyx_ListComp_Append(L,x) PyList_Append(L,x)
- #endif
-
--/* DictGetItem.proto */
--#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
--static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);
--#define __Pyx_PyObject_Dict_GetItem(obj, name)\
-- (likely(PyDict_CheckExact(obj)) ?\
-- __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))
--#else
--#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)
--#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name)
--#endif
--
--/* RaiseTooManyValuesToUnpack.proto */
--static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
--
--/* RaiseNeedMoreValuesToUnpack.proto */
--static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
--
--/* RaiseNoneIterError.proto */
--static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);
--
--/* ExtTypeTest.proto */
--static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type);
--
- /* GetTopmostException.proto */
- #if CYTHON_USE_EXC_INFO_STACK
- static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
-@@ -2316,11 +2373,10 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
- static void __Pyx_AddTraceback(const char *funcname, int c_line,
- int py_line, const char *filename);
-
--/* CIntToPy.proto */
--static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
--
--/* CIntToPy.proto */
--static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
-+/* GCCDiagnostics.proto */
-+#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
-+#define __Pyx_HAS_GCC_DIAGNOSTIC
-+#endif
-
- /* RealImag.proto */
- #if CYTHON_CCOMPLEX
-@@ -2420,12 +2476,15 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
- #endif
- #endif
-
--/* CIntToPy.proto */
--static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value);
--
- /* CIntFromPy.proto */
- static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
-
-+/* CIntToPy.proto */
-+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
-+
-+/* CIntToPy.proto */
-+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
-+
- /* CIntFromPy.proto */
- static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
-
-@@ -2542,8 +2601,17 @@ static PyTypeObject *__pyx_ptype_5numpy_dtype = 0;
- static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0;
- static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0;
- static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_generic = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_number = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_integer = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_signedinteger = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_unsignedinteger = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_inexact = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_floating = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_complexfloating = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_flexible = 0;
-+static PyTypeObject *__pyx_ptype_5numpy_character = 0;
- static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0;
--static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/
-
- /* Module declarations from 'cylp.cy.CyCgl' */
- static PyTypeObject *__pyx_ptype_4cylp_2cy_5CyCgl_CyCglCutGenerator = 0;
-@@ -2624,8 +2692,6 @@ static PyObject *__pyx_builtin_zip;
- static PyObject *__pyx_builtin_AttributeError;
- static PyObject *__pyx_builtin_TypeError;
- static PyObject *__pyx_builtin_range;
--static PyObject *__pyx_builtin_ValueError;
--static PyObject *__pyx_builtin_RuntimeError;
- static const char __pyx_k_[] = "";
- static const char __pyx_k_zip[] = "zip";
- static const char __pyx_k_dims[] = "dims";
-@@ -2661,7 +2727,6 @@ static const char __pyx_k_itertools[] = "itertools";
- static const char __pyx_k_reduce_ex[] = "__reduce_ex__";
- static const char __pyx_k_whatDepth[] = "whatDepth";
- static const char __pyx_k_CyCbcModel[] = "CyCbcModel";
--static const char __pyx_k_ValueError[] = "ValueError";
- static const char __pyx_k_atSolution[] = "atSolution";
- static const char __pyx_k_infeasible[] = "infeasible";
- static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__";
-@@ -2669,7 +2734,6 @@ static const char __pyx_k_ImportError[] = "ImportError";
- static const char __pyx_k_cylp_py_mip[] = "cylp.py.mip";
- static const char __pyx_k_newSolution[] = "newSolution";
- static const char __pyx_k_CyLPSolution[] = "CyLPSolution";
--static const char __pyx_k_RuntimeError[] = "RuntimeError";
- static const char __pyx_k_getVarByName[] = "getVarByName";
- static const char __pyx_k_howOftenInSub[] = "howOftenInSub";
- static const char __pyx_k_problemStatus[] = "problemStatus";
-@@ -2689,29 +2753,18 @@ static const char __pyx_k_relaxation_infeasible[] = "relaxation infeasible";
- static const char __pyx_k_stopped_on_user_event[] = "stopped on user event";
- static const char __pyx_k_pythonCutGeneratorObject[] = "pythonCutGeneratorObject";
- static const char __pyx_k_cylp_py_modeling_CyLPModel[] = "cylp.py.modeling.CyLPModel";
--static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous";
- static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import";
--static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)";
--static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd";
--static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported";
--static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous";
- static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__";
- static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import";
- static const char __pyx_k_setNodeCompare_argument_should_b[] = "setNodeCompare argument should be a NodeCompareBase object. Got %s";
- static const char __pyx_k_stopped_on_solutionslinear_relax[] = "stopped on solutionslinear relaxation unbounded";
--static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short.";
- static PyObject *__pyx_kp_s_;
- static PyObject *__pyx_n_s_AttributeError;
- static PyObject *__pyx_n_s_CyCbcModel;
- static PyObject *__pyx_n_s_CyLPSolution;
--static PyObject *__pyx_kp_u_Format_string_allocated_too_shor;
--static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2;
- static PyObject *__pyx_n_s_ImportError;
- static PyObject *__pyx_n_s_NodeCompareBase;
--static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor;
--static PyObject *__pyx_n_s_RuntimeError;
- static PyObject *__pyx_n_s_TypeError;
--static PyObject *__pyx_n_s_ValueError;
- static PyObject *__pyx_n_s_addCutGenerator;
- static PyObject *__pyx_n_s_append;
- static PyObject *__pyx_n_s_atSolution;
-@@ -2740,8 +2793,6 @@ static PyObject *__pyx_n_s_keys;
- static PyObject *__pyx_n_s_main;
- static PyObject *__pyx_n_s_name;
- static PyObject *__pyx_n_s_name_2;
--static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous;
--static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou;
- static PyObject *__pyx_n_s_newSolution;
- static PyObject *__pyx_kp_s_no_default___reduce___due_to_non;
- static PyObject *__pyx_n_s_normal;
-@@ -2766,7 +2817,6 @@ static PyObject *__pyx_kp_s_stopped_on_solutionslinear_relax;
- static PyObject *__pyx_kp_s_stopped_on_time;
- static PyObject *__pyx_kp_s_stopped_on_user_event;
- static PyObject *__pyx_n_s_test;
--static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd;
- static PyObject *__pyx_n_s_unset;
- static PyObject *__pyx_kp_s_utf_8;
- static PyObject *__pyx_n_s_varIndex;
-@@ -2813,8 +2863,6 @@ static PyObject *__pyx_pf_4cylp_2cy_10CyCbcModel_10CyCbcModel_16maximumSolutions
- static int __pyx_pf_4cylp_2cy_10CyCbcModel_10CyCbcModel_16maximumSolutions_2__set__(struct __pyx_obj_4cylp_2cy_10CyCbcModel_CyCbcModel *__pyx_v_self, PyObject *__pyx_v_value); /* proto */
- static PyObject *__pyx_pf_4cylp_2cy_10CyCbcModel_10CyCbcModel_20__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_4cylp_2cy_10CyCbcModel_CyCbcModel *__pyx_v_self); /* proto */
- static PyObject *__pyx_pf_4cylp_2cy_10CyCbcModel_10CyCbcModel_22__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_4cylp_2cy_10CyCbcModel_CyCbcModel *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
--static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */
--static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */
- static PyObject *__pyx_tp_new_4cylp_2cy_10CyCbcModel_CyCbcModel(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
- static PyObject *__pyx_int_1;
- static PyObject *__pyx_int_neg_1;
-@@ -2823,11 +2871,6 @@ static PyObject *__pyx_tuple__2;
- static PyObject *__pyx_tuple__3;
- static PyObject *__pyx_tuple__4;
- static PyObject *__pyx_tuple__5;
--static PyObject *__pyx_tuple__6;
--static PyObject *__pyx_tuple__7;
--static PyObject *__pyx_tuple__8;
--static PyObject *__pyx_tuple__9;
--static PyObject *__pyx_tuple__10;
- /* Late includes */
-
- /* "cylp/cy/CyCbcModel.pyx":14
-@@ -7397,1939 +7440,331 @@ static PyObject *__pyx_pf_4cylp_2cy_10CyCbcModel_10CyCbcModel_22__setstate_cytho
- return __pyx_r;
- }
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":258
-- * # experimental exception made for __getbuffer__ and __releasebuffer__
-- * # -- the details of this may change.
-- * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<<
-- * # This implementation of getbuffer is geared towards Cython
-- * # requirements, and does not yet fulfill the PEP.
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":735
-+ * ctypedef npy_cdouble complex_t
-+ *
-+ * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(1, a)
-+ *
- */
-
--/* Python wrapper */
--static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
--static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
-- int __pyx_r;
-- __Pyx_RefNannyDeclarations
-- __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0);
-- __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));
--
-- /* function exit code */
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
-- int __pyx_v_i;
-- int __pyx_v_ndim;
-- int __pyx_v_endian_detector;
-- int __pyx_v_little_endian;
-- int __pyx_v_t;
-- char *__pyx_v_f;
-- PyArray_Descr *__pyx_v_descr = 0;
-- int __pyx_v_offset;
-- int __pyx_r;
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) {
-+ PyObject *__pyx_r = NULL;
- __Pyx_RefNannyDeclarations
-- int __pyx_t_1;
-- int __pyx_t_2;
-- PyObject *__pyx_t_3 = NULL;
-- int __pyx_t_4;
-- int __pyx_t_5;
-- int __pyx_t_6;
-- PyArray_Descr *__pyx_t_7;
-- PyObject *__pyx_t_8 = NULL;
-- char *__pyx_t_9;
-+ PyObject *__pyx_t_1 = NULL;
- int __pyx_lineno = 0;
- const char *__pyx_filename = NULL;
- int __pyx_clineno = 0;
-- if (__pyx_v_info == NULL) {
-- PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete");
-- return -1;
-- }
-- __Pyx_RefNannySetupContext("__getbuffer__", 0);
-- __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);
-- __Pyx_GIVEREF(__pyx_v_info->obj);
-+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":265
-- *
-- * cdef int i, ndim
-- * cdef int endian_detector = 1 # <<<<<<<<<<<<<<
-- * cdef bint little_endian = ((&endian_detector)[0] != 0)
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":736
- *
-- */
-- __pyx_v_endian_detector = 1;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":266
-- * cdef int i, ndim
-- * cdef int endian_detector = 1
-- * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<<
-+ * cdef inline object PyArray_MultiIterNew1(a):
-+ * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<<
- *
-- * ndim = PyArray_NDIM(self)
-+ * cdef inline object PyArray_MultiIterNew2(a, b):
- */
-- __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);
-+ __Pyx_XDECREF(__pyx_r);
-+ __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 736, __pyx_L1_error)
-+ __Pyx_GOTREF(__pyx_t_1);
-+ __pyx_r = __pyx_t_1;
-+ __pyx_t_1 = 0;
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":268
-- * cdef bint little_endian = ((&endian_detector)[0] != 0)
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":735
-+ * ctypedef npy_cdouble complex_t
- *
-- * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<<
-+ * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(1, a)
- *
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
- */
-- __pyx_v_ndim = PyArray_NDIM(__pyx_v_self);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":270
-- * ndim = PyArray_NDIM(self)
-- *
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not C contiguous")
-- */
-- __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0);
-- if (__pyx_t_2) {
-- } else {
-- __pyx_t_1 = __pyx_t_2;
-- goto __pyx_L4_bool_binop_done;
-- }
-+ /* function exit code */
-+ __pyx_L1_error:;
-+ __Pyx_XDECREF(__pyx_t_1);
-+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename);
-+ __pyx_r = 0;
-+ __pyx_L0:;
-+ __Pyx_XGIVEREF(__pyx_r);
-+ __Pyx_RefNannyFinishContext();
-+ return __pyx_r;
-+}
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":271
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":738
-+ * return PyArray_MultiIterNew(1, a)
- *
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): # <<<<<<<<<<<<<<
-- * raise ValueError(u"ndarray is not C contiguous")
-+ * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(2, a, b)
- *
- */
-- __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_C_CONTIGUOUS) != 0)) != 0);
-- __pyx_t_1 = __pyx_t_2;
-- __pyx_L4_bool_binop_done:;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":270
-- * ndim = PyArray_NDIM(self)
-- *
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not C contiguous")
-- */
-- if (unlikely(__pyx_t_1)) {
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) {
-+ PyObject *__pyx_r = NULL;
-+ __Pyx_RefNannyDeclarations
-+ PyObject *__pyx_t_1 = NULL;
-+ int __pyx_lineno = 0;
-+ const char *__pyx_filename = NULL;
-+ int __pyx_clineno = 0;
-+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":272
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<<
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":739
- *
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
-- */
-- __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 272, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 272, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":270
-- * ndim = PyArray_NDIM(self)
-+ * cdef inline object PyArray_MultiIterNew2(a, b):
-+ * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<<
- *
-- * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not C contiguous")
-+ * cdef inline object PyArray_MultiIterNew3(a, b, c):
- */
-- }
-+ __Pyx_XDECREF(__pyx_r);
-+ __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 739, __pyx_L1_error)
-+ __Pyx_GOTREF(__pyx_t_1);
-+ __pyx_r = __pyx_t_1;
-+ __pyx_t_1 = 0;
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":274
-- * raise ValueError(u"ndarray is not C contiguous")
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":738
-+ * return PyArray_MultiIterNew(1, a)
-+ *
-+ * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(2, a, b)
- *
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not Fortran contiguous")
- */
-- __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0);
-- if (__pyx_t_2) {
-- } else {
-- __pyx_t_1 = __pyx_t_2;
-- goto __pyx_L7_bool_binop_done;
-- }
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":275
-+ /* function exit code */
-+ __pyx_L1_error:;
-+ __Pyx_XDECREF(__pyx_t_1);
-+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename);
-+ __pyx_r = 0;
-+ __pyx_L0:;
-+ __Pyx_XGIVEREF(__pyx_r);
-+ __Pyx_RefNannyFinishContext();
-+ return __pyx_r;
-+}
-+
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":741
-+ * return PyArray_MultiIterNew(2, a, b)
- *
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): # <<<<<<<<<<<<<<
-- * raise ValueError(u"ndarray is not Fortran contiguous")
-+ * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(3, a, b, c)
- *
- */
-- __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_F_CONTIGUOUS) != 0)) != 0);
-- __pyx_t_1 = __pyx_t_2;
-- __pyx_L7_bool_binop_done:;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":274
-- * raise ValueError(u"ndarray is not C contiguous")
-- *
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not Fortran contiguous")
-- */
-- if (unlikely(__pyx_t_1)) {
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) {
-+ PyObject *__pyx_r = NULL;
-+ __Pyx_RefNannyDeclarations
-+ PyObject *__pyx_t_1 = NULL;
-+ int __pyx_lineno = 0;
-+ const char *__pyx_filename = NULL;
-+ int __pyx_clineno = 0;
-+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":276
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<<
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":742
- *
-- * info.buf = PyArray_DATA(self)
-- */
-- __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 276, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 276, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":274
-- * raise ValueError(u"ndarray is not C contiguous")
-+ * cdef inline object PyArray_MultiIterNew3(a, b, c):
-+ * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<<
- *
-- * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
-- * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
-- * raise ValueError(u"ndarray is not Fortran contiguous")
-+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
- */
-- }
-+ __Pyx_XDECREF(__pyx_r);
-+ __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 742, __pyx_L1_error)
-+ __Pyx_GOTREF(__pyx_t_1);
-+ __pyx_r = __pyx_t_1;
-+ __pyx_t_1 = 0;
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":278
-- * raise ValueError(u"ndarray is not Fortran contiguous")
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":741
-+ * return PyArray_MultiIterNew(2, a, b)
- *
-- * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<<
-- * info.ndim = ndim
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- */
-- __pyx_v_info->buf = PyArray_DATA(__pyx_v_self);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":279
-+ * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(3, a, b, c)
- *
-- * info.buf = PyArray_DATA(self)
-- * info.ndim = ndim # <<<<<<<<<<<<<<
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- * # Allocate new buffer for strides and shape info.
-- */
-- __pyx_v_info->ndim = __pyx_v_ndim;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":280
-- * info.buf = PyArray_DATA(self)
-- * info.ndim = ndim
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
-- * # Allocate new buffer for strides and shape info.
-- * # This is allocated as one block, strides first.
- */
-- __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
-- if (__pyx_t_1) {
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":283
-- * # Allocate new buffer for strides and shape info.
-- * # This is allocated as one block, strides first.
-- * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<<
-- * info.shape = info.strides + ndim
-- * for i in range(ndim):
-- */
-- __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim))));
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":284
-- * # This is allocated as one block, strides first.
-- * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim)
-- * info.shape = info.strides + ndim # <<<<<<<<<<<<<<
-- * for i in range(ndim):
-- * info.strides[i] = PyArray_STRIDES(self)[i]
-- */
-- __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":285
-- * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim)
-- * info.shape = info.strides + ndim
-- * for i in range(ndim): # <<<<<<<<<<<<<<
-- * info.strides[i] = PyArray_STRIDES(self)[i]
-- * info.shape[i] = PyArray_DIMS(self)[i]
-- */
-- __pyx_t_4 = __pyx_v_ndim;
-- __pyx_t_5 = __pyx_t_4;
-- for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) {
-- __pyx_v_i = __pyx_t_6;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":286
-- * info.shape = info.strides + ndim
-- * for i in range(ndim):
-- * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<<
-- * info.shape[i] = PyArray_DIMS(self)[i]
-- * else:
-- */
-- (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]);
-+ /* function exit code */
-+ __pyx_L1_error:;
-+ __Pyx_XDECREF(__pyx_t_1);
-+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename);
-+ __pyx_r = 0;
-+ __pyx_L0:;
-+ __Pyx_XGIVEREF(__pyx_r);
-+ __Pyx_RefNannyFinishContext();
-+ return __pyx_r;
-+}
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":287
-- * for i in range(ndim):
-- * info.strides[i] = PyArray_STRIDES(self)[i]
-- * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<<
-- * else:
-- * info.strides = PyArray_STRIDES(self)
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":744
-+ * return PyArray_MultiIterNew(3, a, b, c)
-+ *
-+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(4, a, b, c, d)
-+ *
- */
-- (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]);
-- }
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":280
-- * info.buf = PyArray_DATA(self)
-- * info.ndim = ndim
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
-- * # Allocate new buffer for strides and shape info.
-- * # This is allocated as one block, strides first.
-- */
-- goto __pyx_L9;
-- }
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) {
-+ PyObject *__pyx_r = NULL;
-+ __Pyx_RefNannyDeclarations
-+ PyObject *__pyx_t_1 = NULL;
-+ int __pyx_lineno = 0;
-+ const char *__pyx_filename = NULL;
-+ int __pyx_clineno = 0;
-+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":289
-- * info.shape[i] = PyArray_DIMS(self)[i]
-- * else:
-- * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<<
-- * info.shape = PyArray_DIMS(self)
-- * info.suboffsets = NULL
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":745
-+ *
-+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
-+ * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<<
-+ *
-+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
- */
-- /*else*/ {
-- __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self));
-+ __Pyx_XDECREF(__pyx_r);
-+ __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 745, __pyx_L1_error)
-+ __Pyx_GOTREF(__pyx_t_1);
-+ __pyx_r = __pyx_t_1;
-+ __pyx_t_1 = 0;
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":290
-- * else:
-- * info.strides = PyArray_STRIDES(self)
-- * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<<
-- * info.suboffsets = NULL
-- * info.itemsize = PyArray_ITEMSIZE(self)
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":744
-+ * return PyArray_MultiIterNew(3, a, b, c)
-+ *
-+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(4, a, b, c, d)
-+ *
- */
-- __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self));
-- }
-- __pyx_L9:;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":291
-- * info.strides = PyArray_STRIDES(self)
-- * info.shape = PyArray_DIMS(self)
-- * info.suboffsets = NULL # <<<<<<<<<<<<<<
-- * info.itemsize = PyArray_ITEMSIZE(self)
-- * info.readonly = not PyArray_ISWRITEABLE(self)
-- */
-- __pyx_v_info->suboffsets = NULL;
-+ /* function exit code */
-+ __pyx_L1_error:;
-+ __Pyx_XDECREF(__pyx_t_1);
-+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename);
-+ __pyx_r = 0;
-+ __pyx_L0:;
-+ __Pyx_XGIVEREF(__pyx_r);
-+ __Pyx_RefNannyFinishContext();
-+ return __pyx_r;
-+}
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":292
-- * info.shape = PyArray_DIMS(self)
-- * info.suboffsets = NULL
-- * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<<
-- * info.readonly = not PyArray_ISWRITEABLE(self)
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":747
-+ * return PyArray_MultiIterNew(4, a, b, c, d)
- *
-- */
-- __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":293
-- * info.suboffsets = NULL
-- * info.itemsize = PyArray_ITEMSIZE(self)
-- * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<<
-+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
- *
-- * cdef int t
- */
-- __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0));
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":296
-- *
-- * cdef int t
-- * cdef char* f = NULL # <<<<<<<<<<<<<<
-- * cdef dtype descr = PyArray_DESCR(self)
-- * cdef int offset
-- */
-- __pyx_v_f = NULL;
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) {
-+ PyObject *__pyx_r = NULL;
-+ __Pyx_RefNannyDeclarations
-+ PyObject *__pyx_t_1 = NULL;
-+ int __pyx_lineno = 0;
-+ const char *__pyx_filename = NULL;
-+ int __pyx_clineno = 0;
-+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":297
-- * cdef int t
-- * cdef char* f = NULL
-- * cdef dtype descr = PyArray_DESCR(self) # <<<<<<<<<<<<<<
-- * cdef int offset
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":748
-+ *
-+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
-+ * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<<
- *
-+ * cdef inline tuple PyDataType_SHAPE(dtype d):
- */
-- __pyx_t_7 = PyArray_DESCR(__pyx_v_self);
-- __pyx_t_3 = ((PyObject *)__pyx_t_7);
-- __Pyx_INCREF(__pyx_t_3);
-- __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3);
-- __pyx_t_3 = 0;
-+ __Pyx_XDECREF(__pyx_r);
-+ __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 748, __pyx_L1_error)
-+ __Pyx_GOTREF(__pyx_t_1);
-+ __pyx_r = __pyx_t_1;
-+ __pyx_t_1 = 0;
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":300
-- * cdef int offset
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":747
-+ * return PyArray_MultiIterNew(4, a, b, c, d)
- *
-- * info.obj = self # <<<<<<<<<<<<<<
-- *
-- * if not PyDataType_HASFIELDS(descr):
-- */
-- __Pyx_INCREF(((PyObject *)__pyx_v_self));
-- __Pyx_GIVEREF(((PyObject *)__pyx_v_self));
-- __Pyx_GOTREF(__pyx_v_info->obj);
-- __Pyx_DECREF(__pyx_v_info->obj);
-- __pyx_v_info->obj = ((PyObject *)__pyx_v_self);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":302
-- * info.obj = self
-- *
-- * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<<
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or
-- */
-- __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0);
-- if (__pyx_t_1) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":303
-- *
-- * if not PyDataType_HASFIELDS(descr):
-- * t = descr.type_num # <<<<<<<<<<<<<<
-- * if ((descr.byteorder == c'>' and little_endian) or
-- * (descr.byteorder == c'<' and not little_endian)):
-- */
-- __pyx_t_4 = __pyx_v_descr->type_num;
-- __pyx_v_t = __pyx_t_4;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":304
-- * if not PyDataType_HASFIELDS(descr):
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (descr.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0);
-- if (!__pyx_t_2) {
-- goto __pyx_L15_next_or;
-- } else {
-- }
-- __pyx_t_2 = (__pyx_v_little_endian != 0);
-- if (!__pyx_t_2) {
-- } else {
-- __pyx_t_1 = __pyx_t_2;
-- goto __pyx_L14_bool_binop_done;
-- }
-- __pyx_L15_next_or:;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":305
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or
-- * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<<
-- * raise ValueError(u"Non-native byte order not supported")
-- * if t == NPY_BYTE: f = "b"
-- */
-- __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0);
-- if (__pyx_t_2) {
-- } else {
-- __pyx_t_1 = __pyx_t_2;
-- goto __pyx_L14_bool_binop_done;
-- }
-- __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0);
-- __pyx_t_1 = __pyx_t_2;
-- __pyx_L14_bool_binop_done:;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":304
-- * if not PyDataType_HASFIELDS(descr):
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (descr.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- if (unlikely(__pyx_t_1)) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":306
-- * if ((descr.byteorder == c'>' and little_endian) or
-- * (descr.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
-- * if t == NPY_BYTE: f = "b"
-- * elif t == NPY_UBYTE: f = "B"
-- */
-- __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 306, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 306, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":304
-- * if not PyDataType_HASFIELDS(descr):
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (descr.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":307
-- * (descr.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<<
-- * elif t == NPY_UBYTE: f = "B"
-- * elif t == NPY_SHORT: f = "h"
-- */
-- switch (__pyx_v_t) {
-- case NPY_BYTE:
-- __pyx_v_f = ((char *)"b");
-- break;
-- case NPY_UBYTE:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":308
-- * raise ValueError(u"Non-native byte order not supported")
-- * if t == NPY_BYTE: f = "b"
-- * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<<
-- * elif t == NPY_SHORT: f = "h"
-- * elif t == NPY_USHORT: f = "H"
-- */
-- __pyx_v_f = ((char *)"B");
-- break;
-- case NPY_SHORT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":309
-- * if t == NPY_BYTE: f = "b"
-- * elif t == NPY_UBYTE: f = "B"
-- * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<<
-- * elif t == NPY_USHORT: f = "H"
-- * elif t == NPY_INT: f = "i"
-- */
-- __pyx_v_f = ((char *)"h");
-- break;
-- case NPY_USHORT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":310
-- * elif t == NPY_UBYTE: f = "B"
-- * elif t == NPY_SHORT: f = "h"
-- * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<<
-- * elif t == NPY_INT: f = "i"
-- * elif t == NPY_UINT: f = "I"
-- */
-- __pyx_v_f = ((char *)"H");
-- break;
-- case NPY_INT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":311
-- * elif t == NPY_SHORT: f = "h"
-- * elif t == NPY_USHORT: f = "H"
-- * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<<
-- * elif t == NPY_UINT: f = "I"
-- * elif t == NPY_LONG: f = "l"
-- */
-- __pyx_v_f = ((char *)"i");
-- break;
-- case NPY_UINT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":312
-- * elif t == NPY_USHORT: f = "H"
-- * elif t == NPY_INT: f = "i"
-- * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONG: f = "l"
-- * elif t == NPY_ULONG: f = "L"
-- */
-- __pyx_v_f = ((char *)"I");
-- break;
-- case NPY_LONG:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":313
-- * elif t == NPY_INT: f = "i"
-- * elif t == NPY_UINT: f = "I"
-- * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<<
-- * elif t == NPY_ULONG: f = "L"
-- * elif t == NPY_LONGLONG: f = "q"
-- */
-- __pyx_v_f = ((char *)"l");
-- break;
-- case NPY_ULONG:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":314
-- * elif t == NPY_UINT: f = "I"
-- * elif t == NPY_LONG: f = "l"
-- * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONGLONG: f = "q"
-- * elif t == NPY_ULONGLONG: f = "Q"
-- */
-- __pyx_v_f = ((char *)"L");
-- break;
-- case NPY_LONGLONG:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":315
-- * elif t == NPY_LONG: f = "l"
-- * elif t == NPY_ULONG: f = "L"
-- * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<<
-- * elif t == NPY_ULONGLONG: f = "Q"
-- * elif t == NPY_FLOAT: f = "f"
-- */
-- __pyx_v_f = ((char *)"q");
-- break;
-- case NPY_ULONGLONG:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":316
-- * elif t == NPY_ULONG: f = "L"
-- * elif t == NPY_LONGLONG: f = "q"
-- * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<<
-- * elif t == NPY_FLOAT: f = "f"
-- * elif t == NPY_DOUBLE: f = "d"
-- */
-- __pyx_v_f = ((char *)"Q");
-- break;
-- case NPY_FLOAT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":317
-- * elif t == NPY_LONGLONG: f = "q"
-- * elif t == NPY_ULONGLONG: f = "Q"
-- * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<<
-- * elif t == NPY_DOUBLE: f = "d"
-- * elif t == NPY_LONGDOUBLE: f = "g"
-- */
-- __pyx_v_f = ((char *)"f");
-- break;
-- case NPY_DOUBLE:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":318
-- * elif t == NPY_ULONGLONG: f = "Q"
-- * elif t == NPY_FLOAT: f = "f"
-- * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONGDOUBLE: f = "g"
-- * elif t == NPY_CFLOAT: f = "Zf"
-- */
-- __pyx_v_f = ((char *)"d");
-- break;
-- case NPY_LONGDOUBLE:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":319
-- * elif t == NPY_FLOAT: f = "f"
-- * elif t == NPY_DOUBLE: f = "d"
-- * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<<
-- * elif t == NPY_CFLOAT: f = "Zf"
-- * elif t == NPY_CDOUBLE: f = "Zd"
-- */
-- __pyx_v_f = ((char *)"g");
-- break;
-- case NPY_CFLOAT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":320
-- * elif t == NPY_DOUBLE: f = "d"
-- * elif t == NPY_LONGDOUBLE: f = "g"
-- * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<<
-- * elif t == NPY_CDOUBLE: f = "Zd"
-- * elif t == NPY_CLONGDOUBLE: f = "Zg"
-- */
-- __pyx_v_f = ((char *)"Zf");
-- break;
-- case NPY_CDOUBLE:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":321
-- * elif t == NPY_LONGDOUBLE: f = "g"
-- * elif t == NPY_CFLOAT: f = "Zf"
-- * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<<
-- * elif t == NPY_CLONGDOUBLE: f = "Zg"
-- * elif t == NPY_OBJECT: f = "O"
-- */
-- __pyx_v_f = ((char *)"Zd");
-- break;
-- case NPY_CLONGDOUBLE:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":322
-- * elif t == NPY_CFLOAT: f = "Zf"
-- * elif t == NPY_CDOUBLE: f = "Zd"
-- * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<<
-- * elif t == NPY_OBJECT: f = "O"
-- * else:
-- */
-- __pyx_v_f = ((char *)"Zg");
-- break;
-- case NPY_OBJECT:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":323
-- * elif t == NPY_CDOUBLE: f = "Zd"
-- * elif t == NPY_CLONGDOUBLE: f = "Zg"
-- * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<<
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
-- */
-- __pyx_v_f = ((char *)"O");
-- break;
-- default:
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":325
-- * elif t == NPY_OBJECT: f = "O"
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<<
-- * info.format = f
-- * return
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 325, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_8 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 325, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_8);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 325, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 325, __pyx_L1_error)
-- break;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":326
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
-- * info.format = f # <<<<<<<<<<<<<<
-- * return
-- * else:
-- */
-- __pyx_v_info->format = __pyx_v_f;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":327
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
-- * info.format = f
-- * return # <<<<<<<<<<<<<<
-- * else:
-- * info.format = PyObject_Malloc(_buffer_format_string_len)
-- */
-- __pyx_r = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":302
-- * info.obj = self
-- *
-- * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<<
-- * t = descr.type_num
-- * if ((descr.byteorder == c'>' and little_endian) or
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":329
-- * return
-- * else:
-- * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<<
-- * info.format[0] = c'^' # Native data types, manual alignment
-- * offset = 0
-- */
-- /*else*/ {
-- __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF));
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":330
-- * else:
-- * info.format = PyObject_Malloc(_buffer_format_string_len)
-- * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<<
-- * offset = 0
-- * f = _util_dtypestring(descr, info.format + 1,
-- */
-- (__pyx_v_info->format[0]) = '^';
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":331
-- * info.format = PyObject_Malloc(_buffer_format_string_len)
-- * info.format[0] = c'^' # Native data types, manual alignment
-- * offset = 0 # <<<<<<<<<<<<<<
-- * f = _util_dtypestring(descr, info.format + 1,
-- * info.format + _buffer_format_string_len,
-- */
-- __pyx_v_offset = 0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":332
-- * info.format[0] = c'^' # Native data types, manual alignment
-- * offset = 0
-- * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<<
-- * info.format + _buffer_format_string_len,
-- * &offset)
-- */
-- __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(2, 332, __pyx_L1_error)
-- __pyx_v_f = __pyx_t_9;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":335
-- * info.format + _buffer_format_string_len,
-- * &offset)
-- * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<<
-- *
-- * def __releasebuffer__(ndarray self, Py_buffer* info):
-- */
-- (__pyx_v_f[0]) = '\x00';
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":258
-- * # experimental exception made for __getbuffer__ and __releasebuffer__
-- * # -- the details of this may change.
-- * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<<
-- * # This implementation of getbuffer is geared towards Cython
-- * # requirements, and does not yet fulfill the PEP.
-- */
--
-- /* function exit code */
-- __pyx_r = 0;
-- goto __pyx_L0;
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_3);
-- __Pyx_XDECREF(__pyx_t_8);
-- __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = -1;
-- if (__pyx_v_info->obj != NULL) {
-- __Pyx_GOTREF(__pyx_v_info->obj);
-- __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
-- }
-- goto __pyx_L2;
-- __pyx_L0:;
-- if (__pyx_v_info->obj == Py_None) {
-- __Pyx_GOTREF(__pyx_v_info->obj);
-- __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
-- }
-- __pyx_L2:;
-- __Pyx_XDECREF((PyObject *)__pyx_v_descr);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":337
-- * f[0] = c'\0' # Terminate format string
-- *
-- * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<<
-- * if PyArray_HASFIELDS(self):
-- * PyObject_Free(info.format)
-- */
--
--/* Python wrapper */
--static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/
--static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
-- __Pyx_RefNannyDeclarations
-- __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0);
-- __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info));
--
-- /* function exit code */
-- __Pyx_RefNannyFinishContext();
--}
--
--static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
-- __Pyx_RefNannyDeclarations
-- int __pyx_t_1;
-- __Pyx_RefNannySetupContext("__releasebuffer__", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":338
-- *
-- * def __releasebuffer__(ndarray self, Py_buffer* info):
-- * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<<
-- * PyObject_Free(info.format)
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- */
-- __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0);
-- if (__pyx_t_1) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":339
-- * def __releasebuffer__(ndarray self, Py_buffer* info):
-- * if PyArray_HASFIELDS(self):
-- * PyObject_Free(info.format) # <<<<<<<<<<<<<<
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- * PyObject_Free(info.strides)
-- */
-- PyObject_Free(__pyx_v_info->format);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":338
-- *
-- * def __releasebuffer__(ndarray self, Py_buffer* info):
-- * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<<
-- * PyObject_Free(info.format)
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":340
-- * if PyArray_HASFIELDS(self):
-- * PyObject_Free(info.format)
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
-- * PyObject_Free(info.strides)
-- * # info.shape was stored after info.strides in the same block
-- */
-- __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
-- if (__pyx_t_1) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":341
-- * PyObject_Free(info.format)
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t):
-- * PyObject_Free(info.strides) # <<<<<<<<<<<<<<
-- * # info.shape was stored after info.strides in the same block
-- *
-- */
-- PyObject_Free(__pyx_v_info->strides);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":340
-- * if PyArray_HASFIELDS(self):
-- * PyObject_Free(info.format)
-- * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
-- * PyObject_Free(info.strides)
-- * # info.shape was stored after info.strides in the same block
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":337
-- * f[0] = c'\0' # Terminate format string
-- *
-- * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<<
-- * if PyArray_HASFIELDS(self):
-- * PyObject_Free(info.format)
-- */
--
-- /* function exit code */
-- __Pyx_RefNannyFinishContext();
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":820
-- * ctypedef npy_cdouble complex_t
-- *
-- * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(1, a)
-- *
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":821
-- *
-- * cdef inline object PyArray_MultiIterNew1(a):
-- * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<<
-- *
-- * cdef inline object PyArray_MultiIterNew2(a, b):
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 821, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_1);
-- __pyx_r = __pyx_t_1;
-- __pyx_t_1 = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":820
-- * ctypedef npy_cdouble complex_t
-- *
-- * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(1, a)
-- *
-- */
--
-- /* function exit code */
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = 0;
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":823
-- * return PyArray_MultiIterNew(1, a)
-- *
-- * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(2, a, b)
-- *
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":824
-- *
-- * cdef inline object PyArray_MultiIterNew2(a, b):
-- * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<<
-- *
-- * cdef inline object PyArray_MultiIterNew3(a, b, c):
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 824, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_1);
-- __pyx_r = __pyx_t_1;
-- __pyx_t_1 = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":823
-- * return PyArray_MultiIterNew(1, a)
-- *
-- * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(2, a, b)
-- *
-- */
--
-- /* function exit code */
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = 0;
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":826
-- * return PyArray_MultiIterNew(2, a, b)
-- *
-- * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(3, a, b, c)
-- *
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":827
-- *
-- * cdef inline object PyArray_MultiIterNew3(a, b, c):
-- * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<<
-- *
-- * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 827, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_1);
-- __pyx_r = __pyx_t_1;
-- __pyx_t_1 = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":826
-- * return PyArray_MultiIterNew(2, a, b)
-- *
-- * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(3, a, b, c)
-- *
-- */
--
-- /* function exit code */
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = 0;
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":829
-- * return PyArray_MultiIterNew(3, a, b, c)
-- *
-- * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(4, a, b, c, d)
-- *
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":830
-- *
-- * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
-- * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<<
-- *
-- * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 830, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_1);
-- __pyx_r = __pyx_t_1;
-- __pyx_t_1 = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":829
-- * return PyArray_MultiIterNew(3, a, b, c)
-- *
-- * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(4, a, b, c, d)
-- *
-- */
--
-- /* function exit code */
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = 0;
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":832
-- * return PyArray_MultiIterNew(4, a, b, c, d)
-- *
-- * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(5, a, b, c, d, e)
-- *
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":833
-- *
-- * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
-- * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<<
-- *
-- * cdef inline tuple PyDataType_SHAPE(dtype d):
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 833, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_1);
-- __pyx_r = __pyx_t_1;
-- __pyx_t_1 = 0;
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":832
-- * return PyArray_MultiIterNew(4, a, b, c, d)
-- *
-- * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
-- * return PyArray_MultiIterNew(5, a, b, c, d, e)
-+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
-+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
- *
- */
-
- /* function exit code */
- __pyx_L1_error:;
- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = 0;
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":835
-- * return PyArray_MultiIterNew(5, a, b, c, d, e)
-- *
-- * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
-- * if PyDataType_HASSUBARRAY(d):
-- * return d.subarray.shape
-- */
--
--static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) {
-- PyObject *__pyx_r = NULL;
-- __Pyx_RefNannyDeclarations
-- int __pyx_t_1;
-- __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":836
-- *
-- * cdef inline tuple PyDataType_SHAPE(dtype d):
-- * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
-- * return d.subarray.shape
-- * else:
-- */
-- __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0);
-- if (__pyx_t_1) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":837
-- * cdef inline tuple PyDataType_SHAPE(dtype d):
-- * if PyDataType_HASSUBARRAY(d):
-- * return d.subarray.shape # <<<<<<<<<<<<<<
-- * else:
-- * return ()
-- */
-- __Pyx_XDECREF(__pyx_r);
-- __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape));
-- __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape);
-- goto __pyx_L0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":836
-- *
-- * cdef inline tuple PyDataType_SHAPE(dtype d):
-- * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
-- * return d.subarray.shape
-- * else:
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":839
-- * return d.subarray.shape
-- * else:
-- * return () # <<<<<<<<<<<<<<
-- *
-- * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:
-- */
-- /*else*/ {
-- __Pyx_XDECREF(__pyx_r);
-- __Pyx_INCREF(__pyx_empty_tuple);
-- __pyx_r = __pyx_empty_tuple;
-- goto __pyx_L0;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":835
-- * return PyArray_MultiIterNew(5, a, b, c, d, e)
-- *
-- * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
-- * if PyDataType_HASSUBARRAY(d):
-- * return d.subarray.shape
-- */
--
-- /* function exit code */
-- __pyx_L0:;
-- __Pyx_XGIVEREF(__pyx_r);
-- __Pyx_RefNannyFinishContext();
-- return __pyx_r;
--}
--
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":841
-- * return ()
-- *
-- * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
-- * # Recursive utility function used in __getbuffer__ to get format
-- * # string. The new location in the format string is returned.
-- */
--
--static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) {
-- PyArray_Descr *__pyx_v_child = 0;
-- int __pyx_v_endian_detector;
-- int __pyx_v_little_endian;
-- PyObject *__pyx_v_fields = 0;
-- PyObject *__pyx_v_childname = NULL;
-- PyObject *__pyx_v_new_offset = NULL;
-- PyObject *__pyx_v_t = NULL;
-- char *__pyx_r;
-- __Pyx_RefNannyDeclarations
-- PyObject *__pyx_t_1 = NULL;
-- Py_ssize_t __pyx_t_2;
-- PyObject *__pyx_t_3 = NULL;
-- PyObject *__pyx_t_4 = NULL;
-- int __pyx_t_5;
-- int __pyx_t_6;
-- int __pyx_t_7;
-- long __pyx_t_8;
-- char *__pyx_t_9;
-- int __pyx_lineno = 0;
-- const char *__pyx_filename = NULL;
-- int __pyx_clineno = 0;
-- __Pyx_RefNannySetupContext("_util_dtypestring", 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":846
-- *
-- * cdef dtype child
-- * cdef int endian_detector = 1 # <<<<<<<<<<<<<<
-- * cdef bint little_endian = ((&endian_detector)[0] != 0)
-- * cdef tuple fields
-- */
-- __pyx_v_endian_detector = 1;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":847
-- * cdef dtype child
-- * cdef int endian_detector = 1
-- * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<<
-- * cdef tuple fields
-- *
-- */
-- __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":850
-- * cdef tuple fields
-- *
-- * for childname in descr.names: # <<<<<<<<<<<<<<
-- * fields = descr.fields[childname]
-- * child, new_offset = fields
-- */
-- if (unlikely(__pyx_v_descr->names == Py_None)) {
-- PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
-- __PYX_ERR(2, 850, __pyx_L1_error)
-- }
-- __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;
-- for (;;) {
-- if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
-- #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
-- __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(2, 850, __pyx_L1_error)
-- #else
-- __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 850, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- #endif
-- __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3);
-- __pyx_t_3 = 0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":851
-- *
-- * for childname in descr.names:
-- * fields = descr.fields[childname] # <<<<<<<<<<<<<<
-- * child, new_offset = fields
-- *
-- */
-- if (unlikely(__pyx_v_descr->fields == Py_None)) {
-- PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
-- __PYX_ERR(2, 851, __pyx_L1_error)
-- }
-- __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 851, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(2, 851, __pyx_L1_error)
-- __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3));
-- __pyx_t_3 = 0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":852
-- * for childname in descr.names:
-- * fields = descr.fields[childname]
-- * child, new_offset = fields # <<<<<<<<<<<<<<
-- *
-- * if (end - f) - (new_offset - offset[0]) < 15:
-- */
-- if (likely(__pyx_v_fields != Py_None)) {
-- PyObject* sequence = __pyx_v_fields;
-- Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
-- if (unlikely(size != 2)) {
-- if (size > 2) __Pyx_RaiseTooManyValuesError(2);
-- else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
-- __PYX_ERR(2, 852, __pyx_L1_error)
-- }
-- #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
-- __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0);
-- __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1);
-- __Pyx_INCREF(__pyx_t_3);
-- __Pyx_INCREF(__pyx_t_4);
-- #else
-- __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 852, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 852, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- #endif
-- } else {
-- __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(2, 852, __pyx_L1_error)
-- }
-- if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(2, 852, __pyx_L1_error)
-- __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3));
-- __pyx_t_3 = 0;
-- __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4);
-- __pyx_t_4 = 0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":854
-- * child, new_offset = fields
-- *
-- * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
-- *
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 854, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 854, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 854, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0);
-- if (unlikely(__pyx_t_6)) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":855
-- *
-- * if (end - f) - (new_offset - offset[0]) < 15:
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<<
-- *
-- * if ((child.byteorder == c'>' and little_endian) or
-- */
-- __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 855, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 855, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":854
-- * child, new_offset = fields
-- *
-- * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
-- *
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":857
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
-- *
-- * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (child.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0);
-- if (!__pyx_t_7) {
-- goto __pyx_L8_next_or;
-- } else {
-- }
-- __pyx_t_7 = (__pyx_v_little_endian != 0);
-- if (!__pyx_t_7) {
-- } else {
-- __pyx_t_6 = __pyx_t_7;
-- goto __pyx_L7_bool_binop_done;
-- }
-- __pyx_L8_next_or:;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":858
-- *
-- * if ((child.byteorder == c'>' and little_endian) or
-- * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<<
-- * raise ValueError(u"Non-native byte order not supported")
-- * # One could encode it in the format string and have Cython
-- */
-- __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0);
-- if (__pyx_t_7) {
-- } else {
-- __pyx_t_6 = __pyx_t_7;
-- goto __pyx_L7_bool_binop_done;
-- }
-- __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0);
-- __pyx_t_6 = __pyx_t_7;
-- __pyx_L7_bool_binop_done:;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":857
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
-- *
-- * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (child.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- if (unlikely(__pyx_t_6)) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":859
-- * if ((child.byteorder == c'>' and little_endian) or
-- * (child.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
-- * # One could encode it in the format string and have Cython
-- * # complain instead, BUT: < and > in format strings also imply
-- */
-- __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 859, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __Pyx_Raise(__pyx_t_3, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __PYX_ERR(2, 859, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":857
-- * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
-- *
-- * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
-- * (child.byteorder == c'<' and not little_endian)):
-- * raise ValueError(u"Non-native byte order not supported")
-- */
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":869
-- *
-- * # Output padding bytes
-- * while offset[0] < new_offset: # <<<<<<<<<<<<<<
-- * f[0] = 120 # "x"; pad byte
-- * f += 1
-- */
-- while (1) {
-- __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 869, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 869, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 869, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (!__pyx_t_6) break;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":870
-- * # Output padding bytes
-- * while offset[0] < new_offset:
-- * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<<
-- * f += 1
-- * offset[0] += 1
-- */
-- (__pyx_v_f[0]) = 0x78;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":871
-- * while offset[0] < new_offset:
-- * f[0] = 120 # "x"; pad byte
-- * f += 1 # <<<<<<<<<<<<<<
-- * offset[0] += 1
-- *
-- */
-- __pyx_v_f = (__pyx_v_f + 1);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":872
-- * f[0] = 120 # "x"; pad byte
-- * f += 1
-- * offset[0] += 1 # <<<<<<<<<<<<<<
-- *
-- * offset[0] += child.itemsize
-- */
-- __pyx_t_8 = 0;
-- (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1);
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":874
-- * offset[0] += 1
-- *
-- * offset[0] += child.itemsize # <<<<<<<<<<<<<<
-- *
-- * if not PyDataType_HASFIELDS(child):
-- */
-- __pyx_t_8 = 0;
-- (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize);
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":876
-- * offset[0] += child.itemsize
-- *
-- * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
-- * t = child.type_num
-- * if end - f < 5:
-- */
-- __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0);
-- if (__pyx_t_6) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":877
-- *
-- * if not PyDataType_HASFIELDS(child):
-- * t = child.type_num # <<<<<<<<<<<<<<
-- * if end - f < 5:
-- * raise RuntimeError(u"Format string allocated too short.")
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 877, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4);
-- __pyx_t_4 = 0;
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":878
-- * if not PyDataType_HASFIELDS(child):
-- * t = child.type_num
-- * if end - f < 5: # <<<<<<<<<<<<<<
-- * raise RuntimeError(u"Format string allocated too short.")
-- *
-- */
-- __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0);
-- if (unlikely(__pyx_t_6)) {
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":879
-- * t = child.type_num
-- * if end - f < 5:
-- * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<<
-- *
-- * # Until ticket #99 is fixed, use integers to avoid warnings
-- */
-- __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 879, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __Pyx_Raise(__pyx_t_4, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __PYX_ERR(2, 879, __pyx_L1_error)
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":878
-- * if not PyDataType_HASFIELDS(child):
-- * t = child.type_num
-- * if end - f < 5: # <<<<<<<<<<<<<<
-- * raise RuntimeError(u"Format string allocated too short.")
-- *
-- */
-- }
-+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename);
-+ __pyx_r = 0;
-+ __pyx_L0:;
-+ __Pyx_XGIVEREF(__pyx_r);
-+ __Pyx_RefNannyFinishContext();
-+ return __pyx_r;
-+}
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":882
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":750
-+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
- *
-- * # Until ticket #99 is fixed, use integers to avoid warnings
-- * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<<
-- * elif t == NPY_UBYTE: f[0] = 66 #"B"
-- * elif t == NPY_SHORT: f[0] = 104 #"h"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 882, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 882, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 882, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 98;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":883
-- * # Until ticket #99 is fixed, use integers to avoid warnings
-- * if t == NPY_BYTE: f[0] = 98 #"b"
-- * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<<
-- * elif t == NPY_SHORT: f[0] = 104 #"h"
-- * elif t == NPY_USHORT: f[0] = 72 #"H"
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 883, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 883, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 883, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 66;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":884
-- * if t == NPY_BYTE: f[0] = 98 #"b"
-- * elif t == NPY_UBYTE: f[0] = 66 #"B"
-- * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<<
-- * elif t == NPY_USHORT: f[0] = 72 #"H"
-- * elif t == NPY_INT: f[0] = 105 #"i"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 884, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 884, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 884, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x68;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":885
-- * elif t == NPY_UBYTE: f[0] = 66 #"B"
-- * elif t == NPY_SHORT: f[0] = 104 #"h"
-- * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<<
-- * elif t == NPY_INT: f[0] = 105 #"i"
-- * elif t == NPY_UINT: f[0] = 73 #"I"
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 885, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 885, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 885, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 72;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":886
-- * elif t == NPY_SHORT: f[0] = 104 #"h"
-- * elif t == NPY_USHORT: f[0] = 72 #"H"
-- * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<<
-- * elif t == NPY_UINT: f[0] = 73 #"I"
-- * elif t == NPY_LONG: f[0] = 108 #"l"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 886, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 886, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 886, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x69;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":887
-- * elif t == NPY_USHORT: f[0] = 72 #"H"
-- * elif t == NPY_INT: f[0] = 105 #"i"
-- * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONG: f[0] = 108 #"l"
-- * elif t == NPY_ULONG: f[0] = 76 #"L"
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 887, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 887, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 887, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 73;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":888
-- * elif t == NPY_INT: f[0] = 105 #"i"
-- * elif t == NPY_UINT: f[0] = 73 #"I"
-- * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<<
-- * elif t == NPY_ULONG: f[0] = 76 #"L"
-- * elif t == NPY_LONGLONG: f[0] = 113 #"q"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 888, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 888, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 888, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x6C;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":889
-- * elif t == NPY_UINT: f[0] = 73 #"I"
-- * elif t == NPY_LONG: f[0] = 108 #"l"
-- * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONGLONG: f[0] = 113 #"q"
-- * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 889, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 889, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 889, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 76;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":890
-- * elif t == NPY_LONG: f[0] = 108 #"l"
-- * elif t == NPY_ULONG: f[0] = 76 #"L"
-- * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<<
-- * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
-- * elif t == NPY_FLOAT: f[0] = 102 #"f"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 890, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 890, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 890, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x71;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":891
-- * elif t == NPY_ULONG: f[0] = 76 #"L"
-- * elif t == NPY_LONGLONG: f[0] = 113 #"q"
-- * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<<
-- * elif t == NPY_FLOAT: f[0] = 102 #"f"
-- * elif t == NPY_DOUBLE: f[0] = 100 #"d"
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 891, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 891, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 891, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 81;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":892
-- * elif t == NPY_LONGLONG: f[0] = 113 #"q"
-- * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
-- * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<<
-- * elif t == NPY_DOUBLE: f[0] = 100 #"d"
-- * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 892, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 892, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 892, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x66;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":893
-- * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
-- * elif t == NPY_FLOAT: f[0] = 102 #"f"
-- * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<<
-- * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
-- * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 893, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 893, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 893, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x64;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":894
-- * elif t == NPY_FLOAT: f[0] = 102 #"f"
-- * elif t == NPY_DOUBLE: f[0] = 100 #"d"
-- * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<<
-- * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
-- * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 894, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 894, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 894, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 0x67;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":895
-- * elif t == NPY_DOUBLE: f[0] = 100 #"d"
-- * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
-- * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<<
-- * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
-- * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 895, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 895, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 895, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 90;
-- (__pyx_v_f[1]) = 0x66;
-- __pyx_v_f = (__pyx_v_f + 1);
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":896
-- * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
-- * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
-- * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<<
-- * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
-- * elif t == NPY_OBJECT: f[0] = 79 #"O"
-- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 896, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 896, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 896, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 90;
-- (__pyx_v_f[1]) = 0x64;
-- __pyx_v_f = (__pyx_v_f + 1);
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":897
-- * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
-- * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
-- * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<<
-- * elif t == NPY_OBJECT: f[0] = 79 #"O"
-- * else:
-- */
-- __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 897, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 897, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 897, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- if (__pyx_t_6) {
-- (__pyx_v_f[0]) = 90;
-- (__pyx_v_f[1]) = 0x67;
-- __pyx_v_f = (__pyx_v_f + 1);
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":898
-- * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
-- * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
-- * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<<
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
-+ * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
-+ * if PyDataType_HASSUBARRAY(d):
-+ * return d.subarray.shape
- */
-- __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 898, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 898, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 898, __pyx_L1_error)
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- if (likely(__pyx_t_6)) {
-- (__pyx_v_f[0]) = 79;
-- goto __pyx_L15;
-- }
--
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":900
-- * elif t == NPY_OBJECT: f[0] = 79 #"O"
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<<
-- * f += 1
-- * else:
-- */
-- /*else*/ {
-- __pyx_t_3 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 900, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_3);
-- __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 900, __pyx_L1_error)
-- __Pyx_GOTREF(__pyx_t_4);
-- __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
-- __Pyx_Raise(__pyx_t_4, 0, 0, 0);
-- __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
-- __PYX_ERR(2, 900, __pyx_L1_error)
-- }
-- __pyx_L15:;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":901
-- * else:
-- * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
-- * f += 1 # <<<<<<<<<<<<<<
-- * else:
-- * # Cython ignores struct boundary information ("T{...}"),
-- */
-- __pyx_v_f = (__pyx_v_f + 1);
-+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) {
-+ PyObject *__pyx_r = NULL;
-+ __Pyx_RefNannyDeclarations
-+ int __pyx_t_1;
-+ __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":876
-- * offset[0] += child.itemsize
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":751
- *
-- * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
-- * t = child.type_num
-- * if end - f < 5:
-+ * cdef inline tuple PyDataType_SHAPE(dtype d):
-+ * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
-+ * return d.subarray.shape
-+ * else:
- */
-- goto __pyx_L13;
-- }
-+ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0);
-+ if (__pyx_t_1) {
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":905
-- * # Cython ignores struct boundary information ("T{...}"),
-- * # so don't output it
-- * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<<
-- * return f
-- *
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":752
-+ * cdef inline tuple PyDataType_SHAPE(dtype d):
-+ * if PyDataType_HASSUBARRAY(d):
-+ * return d.subarray.shape # <<<<<<<<<<<<<<
-+ * else:
-+ * return ()
- */
-- /*else*/ {
-- __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(2, 905, __pyx_L1_error)
-- __pyx_v_f = __pyx_t_9;
-- }
-- __pyx_L13:;
-+ __Pyx_XDECREF(__pyx_r);
-+ __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape));
-+ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape);
-+ goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":850
-- * cdef tuple fields
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":751
- *
-- * for childname in descr.names: # <<<<<<<<<<<<<<
-- * fields = descr.fields[childname]
-- * child, new_offset = fields
-+ * cdef inline tuple PyDataType_SHAPE(dtype d):
-+ * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
-+ * return d.subarray.shape
-+ * else:
- */
- }
-- __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":906
-- * # so don't output it
-- * f = _util_dtypestring(child, f, end, offset)
-- * return f # <<<<<<<<<<<<<<
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":754
-+ * return d.subarray.shape
-+ * else:
-+ * return () # <<<<<<<<<<<<<<
- *
- *
- */
-- __pyx_r = __pyx_v_f;
-- goto __pyx_L0;
-+ /*else*/ {
-+ __Pyx_XDECREF(__pyx_r);
-+ __Pyx_INCREF(__pyx_empty_tuple);
-+ __pyx_r = __pyx_empty_tuple;
-+ goto __pyx_L0;
-+ }
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":841
-- * return ()
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":750
-+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
- *
-- * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
-- * # Recursive utility function used in __getbuffer__ to get format
-- * # string. The new location in the format string is returned.
-+ * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
-+ * if PyDataType_HASSUBARRAY(d):
-+ * return d.subarray.shape
- */
-
- /* function exit code */
-- __pyx_L1_error:;
-- __Pyx_XDECREF(__pyx_t_1);
-- __Pyx_XDECREF(__pyx_t_3);
-- __Pyx_XDECREF(__pyx_t_4);
-- __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename);
-- __pyx_r = NULL;
- __pyx_L0:;
-- __Pyx_XDECREF((PyObject *)__pyx_v_child);
-- __Pyx_XDECREF(__pyx_v_fields);
-- __Pyx_XDECREF(__pyx_v_childname);
-- __Pyx_XDECREF(__pyx_v_new_offset);
-- __Pyx_XDECREF(__pyx_v_t);
-+ __Pyx_XGIVEREF(__pyx_r);
- __Pyx_RefNannyFinishContext();
- return __pyx_r;
- }
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1021
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":929
- * int _import_umath() except -1
- *
- * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
-@@ -9341,7 +7776,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a
- __Pyx_RefNannyDeclarations
- __Pyx_RefNannySetupContext("set_array_base", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1022
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":930
- *
- * cdef inline void set_array_base(ndarray arr, object base):
- * Py_INCREF(base) # important to do this before stealing the reference below! # <<<<<<<<<<<<<<
-@@ -9350,7 +7785,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a
- */
- Py_INCREF(__pyx_v_base);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1023
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":931
- * cdef inline void set_array_base(ndarray arr, object base):
- * Py_INCREF(base) # important to do this before stealing the reference below!
- * PyArray_SetBaseObject(arr, base) # <<<<<<<<<<<<<<
-@@ -9359,7 +7794,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a
- */
- (void)(PyArray_SetBaseObject(__pyx_v_arr, __pyx_v_base));
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1021
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":929
- * int _import_umath() except -1
- *
- * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
-@@ -9371,7 +7806,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a
- __Pyx_RefNannyFinishContext();
- }
-
--/* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1025
-+/* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":933
- * PyArray_SetBaseObject(arr, base)
- *
- * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<<
-@@ -9386,7 +7821,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py
- int __pyx_t_1;
- __Pyx_RefNannySetupContext("get_array_base", 0);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1026
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":934
- *
- * cdef inline object get_array_base(ndarray arr):
- * base = PyArray_BASE(arr) # <<<<<<<<<<<<<<
-@@ -9395,7 +7830,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py
- */
- __pyx_v_base = PyArray_BASE(__pyx_v_arr);
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1027
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":935
- * cdef inline object get_array_base(ndarray arr):
- * base = PyArray_BASE(arr)
- * if base is NULL: # <<<<<<<<<<<<<<
-@@ -9405,7 +7840,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py
- __pyx_t_1 = ((__pyx_v_base == NULL) != 0);
- if (__pyx_t_1) {
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1028
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":936
- * base = PyArray_BASE(arr)
- * if base is NULL:
- * return None # <<<<<<<<<<<<<<
-@@ -9416,7 +7851,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py
- __pyx_r = Py_None; __Pyx_INCREF(Py_None);
- goto __pyx_L0;
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1027
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":935
- * cdef inline object get_array_base(ndarray arr):
- * base = PyArray_BASE(arr)
- * if base is NULL: # <<<<<<<<<<<<<<
-@@ -9425,7 +7860,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py
- */
- }
-
-- /* "../../.local/anaconda3/lib/python3.8/site-packages/Cython/Includes/numpy/__init__.pxd":1029
-+ /* "../miniforge3/envs/numpy-dev/lib/python3.10/site-packages/numpy/__init__.pxd":937
- * if base is NULL:
- * return None
- * return