diff --git a/cylc/sphinx_ext/cylc_lang/__init__.py b/cylc/sphinx_ext/cylc_lang/__init__.py
index 700687a..bef3932 100644
--- a/cylc/sphinx_ext/cylc_lang/__init__.py
+++ b/cylc/sphinx_ext/cylc_lang/__init__.py
@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
-'''An extension providing pygments lexers for the Cylc suite.rc language.
+'''An extension providing pygments lexers for the Cylc flow.cylc language.
Pygments Lexers
@@ -24,7 +24,7 @@
``cylc``
^^^^^^^^
-Lexer for the Cylc ``.rc`` language and ``suite.rc`` extensions.
+Lexer for the Cylc language and ``flow.cylc`` extensions.
.. rst-example::
@@ -62,7 +62,7 @@
.. rst-example::
- .. cylc:conf:: my-conf1.rc
+ .. cylc:conf:: my-conf1.cylc
.. cylc:setting:: foo
@@ -78,7 +78,7 @@
a section called ``bar``.
- Here's a link to :cylc:conf:`this section <my-conf1.rc[bar]>`, note
+ Here's a link to :cylc:conf:`this section <my-conf1.cylc[bar]>`, note
we re-named the target using the sphinx/rst ``name <target>`` syntax.
.. cylc:setting:: pub
@@ -154,7 +154,7 @@
.. rst-example::
- .. cylc-scope:: my-conf1.rc[bar]
+ .. cylc-scope:: my-conf1.cylc[bar]
Lets head to the :cylc:conf:`pub`.
@@ -166,7 +166,7 @@
.. note::
- This resets it to the hardcoded default which is ``suite.rc``.
+ This resets it to the hardcoded default which is ``flow.cylc``.
'''
diff --git a/cylc/sphinx_ext/cylc_lang/domains.py b/cylc/sphinx_ext/cylc_lang/domains.py
index e509c5f..9a2bfaf 100644
--- a/cylc/sphinx_ext/cylc_lang/domains.py
+++ b/cylc/sphinx_ext/cylc_lang/domains.py
@@ -8,7 +8,7 @@
from sphinx.util.nodes import make_refnode
-DEFAULT_SCOPE = 'suite.rc'
+DEFAULT_SCOPE = 'flow.cylc'
KEYS = {
'conf': lambda s: f'{s}',
@@ -64,8 +64,8 @@ def tokenise(namespace_string):
Examples:
Normal Usage:
- >>> tokenise('x.rc') # doctest: +NORMALIZE_WHITESPACE
- {'conf': 'x.rc',
+ >>> tokenise('x.cylc') # doctest: +NORMALIZE_WHITESPACE
+ {'conf': 'x.cylc',
'section': None,
'setting': None,
'value': None}
@@ -74,13 +74,13 @@ def tokenise(namespace_string):
'section': None,
'setting': 'a',
'value': 'b'}
- >>> tokenise('x.rc[a][b][c]d = e') # doctest: +NORMALIZE_WHITESPACE
- {'conf': 'x.rc',
+ >>> tokenise('x.cylc[a][b][c]d = e') # doctest: +NORMALIZE_WHITESPACE
+ {'conf': 'x.cylc',
'section': ('a', 'b', 'c'),
'setting': 'd',
'value': 'e'}
- >>> tokenise('x.rc|a') # doctest: +NORMALIZE_WHITESPACE
- {'conf': 'x.rc',
+ >>> tokenise('x.cylc|a') # doctest: +NORMALIZE_WHITESPACE
+ {'conf': 'x.cylc',
'section': None,
'setting': 'a',
'value': None}
@@ -122,12 +122,12 @@ def detokenise(namespace_tokens):
"""
Examples:
Full namespace
- >>> detokenise(tokenise('x.rc[a][b][c]d=e'))
- 'x.rc[a][b][c]d=e'
- >>> detokenise(tokenise('x.rc|a'))
- 'x.rc|a'
- >>> detokenise(tokenise('x.rc'))
- 'x.rc'
+ >>> detokenise(tokenise('x.cylc[a][b][c]d=e'))
+ 'x.cylc[a][b][c]d=e'
+ >>> detokenise(tokenise('x.cylc|a'))
+ 'x.cylc|a'
+ >>> detokenise(tokenise('x.cylc'))
+ 'x.cylc'
>>> detokenise(tokenise('a'))
'a'
@@ -154,8 +154,8 @@ def partials_from_tokens(tokens):
"""
Examples:
>>> partials_from_tokens( # doctest: +NORMALIZE_WHITESPACE
- ... tokenise('x.rc[a][b][c]d=e'))
- (('conf', 'x.rc'),
+ ... tokenise('x.cylc[a][b][c]d=e'))
+ (('conf', 'x.cylc'),
('section', ('a', 'b', 'c')),
('setting', 'd'),
('value', 'e'))
@@ -175,11 +175,11 @@ def tokens_from_partials(partials):
"""
Examples:
>>> tokens_from_partials([ # doctest: +NORMALIZE_WHITESPACE
- ... ('conf', 'a.rc'),
+ ... ('conf', 'a.cylc'),
... ('section', ('b', 'c')),
... ('setting', 'd')
... ])
- {'conf': 'a.rc',
+ {'conf': 'a.cylc',
'section': ('b', 'c'),
'setting': 'd',
'value': None}
@@ -227,16 +227,16 @@ def tokens_relative(base, override):
... return detokenise(tokens_relative(
... tokenise(base), tokenise(override)))
- >>> test_tokens('a.rc[b]c', '[..]d')
- 'a.rc[b]d'
- >>> test_tokens('a.rc[b]c=d', '..=e')
- 'a.rc[b]c=e'
- >>> test_tokens('a.rc[b]c', '[..][d]e')
- 'a.rc[b][d]e'
- >>> test_tokens('a.rc[b]', '[c]d')
- 'a.rc[b][c]d'
- >>> test_tokens('a.rc[b]c=d', '[..][..]e')
- 'a.rc[b]e'
+ >>> test_tokens('a.cylc[b]c', '[..]d')
+ 'a.cylc[b]d'
+ >>> test_tokens('a.cylc[b]c=d', '..=e')
+ 'a.cylc[b]c=e'
+ >>> test_tokens('a.cylc[b]c', '[..][d]e')
+ 'a.cylc[b][d]e'
+ >>> test_tokens('a.cylc[b]', '[c]d')
+ 'a.cylc[b][c]d'
+ >>> test_tokens('a.cylc[b]c=d', '[..][..]e')
+ 'a.cylc[b]e'
"""
# ensure that base is an aboslute path
@@ -424,14 +424,14 @@ class CylcScopeDirective(SphinxDirective):
@staticmethod
def get_ref_context(namespace):
"""
- >>> CylcScopeDirective.get_ref_context('a.rc[b][c]d'
+ >>> CylcScopeDirective.get_ref_context('a.cylc[b][c]d'
... ) # doctest: +NORMALIZE_WHITESPACE
- [('cylc', 'conf', 'a.rc', None),
+ [('cylc', 'conf', 'a.cylc', None),
('cylc', 'section', ('b', 'c'), None),
('cylc', 'setting', 'd', None)]
- >>> CylcScopeDirective.get_ref_context('a.rc')
- [('cylc', 'conf', 'a.rc', None)]
+ >>> CylcScopeDirective.get_ref_context('a.cylc')
+ [('cylc', 'conf', 'a.cylc', None)]
"""
ret = []
for token, value in partials_from_tokens(tokenise(namespace)):
diff --git a/cylc/sphinx_ext/cylc_lang/lexers.py b/cylc/sphinx_ext/cylc_lang/lexers.py
index 4327ff9..e3e562a 100644
--- a/cylc/sphinx_ext/cylc_lang/lexers.py
+++ b/cylc/sphinx_ext/cylc_lang/lexers.py
@@ -23,10 +23,9 @@
class CylcLexer(RegexLexer):
- """Pygments lexer for the Cylc suite.rc language."""
+ """Pygments lexer for the Cylc language."""
- # Pygments tokens for Cylc suite.rc elements which have no direct
- # translation.
+ # Pygments tokens for flow.cylc elements which have no direct translation.
HEADING_TOKEN = Name.Tag
SETTING_TOKEN = Name.Variable
GRAPH_TASK_TOKEN = Keyword.Declaration
@@ -44,7 +43,7 @@ class CylcLexer(RegexLexer):
# Pygments values.
name = 'Cylc'
aliases = ['cylc', 'suiterc']
- filenames = ['suite.rc']
+ filenames = ['suite.rc', 'flow.cylc']
# mimetypes = ['text/x-ini', 'text/inf']
# Patterns, rules and tokens.