Merge pull request #6120 from openjournals/joss.07334
Merging automatically
editorialbot authored Nov 12, 2024
2 parents 5f3935e + ab51187 commit b6c4d48
Showing 3 changed files with 768 additions and 0 deletions.
306 changes: 306 additions & 0 deletions joss.07334/10.21105.joss.07334.crossref.xml
@@ -0,0 +1,306 @@
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch xmlns="http://www.crossref.org/schema/5.3.1"
xmlns:ai="http://www.crossref.org/AccessIndicators.xsd"
xmlns:rel="http://www.crossref.org/relations.xsd"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="5.3.1"
xsi:schemaLocation="http://www.crossref.org/schema/5.3.1 http://www.crossref.org/schemas/crossref5.3.1.xsd">
<head>
<doi_batch_id>20241112151341-b7717bd99852c6c0126a409df3efe730da6fec85</doi_batch_id>
<timestamp>20241112151341</timestamp>
<depositor>
<depositor_name>JOSS Admin</depositor_name>
<email_address>admin@theoj.org</email_address>
</depositor>
<registrant>The Open Journal</registrant>
</head>
<body>
<journal>
<journal_metadata>
<full_title>Journal of Open Source Software</full_title>
<abbrev_title>JOSS</abbrev_title>
<issn media_type="electronic">2475-9066</issn>
<doi_data>
<doi>10.21105/joss</doi>
<resource>https://joss.theoj.org</resource>
</doi_data>
</journal_metadata>
<journal_issue>
<publication_date media_type="online">
<month>11</month>
<year>2024</year>
</publication_date>
<journal_volume>
<volume>9</volume>
</journal_volume>
<issue>103</issue>
</journal_issue>
<journal_article publication_type="full_text">
<titles>
<title>DiscreteEntropy.jl: Entropy Estimation of Discrete
Random Variables with Julia</title>
</titles>
<contributors>
<person_name sequence="first" contributor_role="author">
<given_name>David A.</given_name>
<surname>Kelly</surname>
<affiliations>
<institution><institution_name>King’s College London, UK</institution_name></institution>
</affiliations>
<ORCID>https://orcid.org/0000-0002-5368-6769</ORCID>
</person_name>
<person_name sequence="additional"
contributor_role="author">
<given_name>Ilaria Pia La</given_name>
<surname>Torre</surname>
<affiliations>
<institution><institution_name>University College London, UK</institution_name></institution>
</affiliations>
<ORCID>https://orcid.org/0009-0006-2733-5283</ORCID>
</person_name>
</contributors>
<publication_date>
<month>11</month>
<day>12</day>
<year>2024</year>
</publication_date>
<pages>
<first_page>7334</first_page>
</pages>
<publisher_item>
<identifier id_type="doi">10.21105/joss.07334</identifier>
</publisher_item>
<ai:program name="AccessIndicators">
<ai:license_ref applies_to="vor">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
<ai:license_ref applies_to="am">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
<ai:license_ref applies_to="tdm">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
</ai:program>
<rel:program>
<rel:related_item>
<rel:description>Software archive</rel:description>
<rel:inter_work_relation relationship-type="references" identifier-type="doi">10.5281/zenodo.14067392</rel:inter_work_relation>
</rel:related_item>
<rel:related_item>
<rel:description>GitHub review issue</rel:description>
<rel:inter_work_relation relationship-type="hasReview" identifier-type="uri">https://github.com/openjournals/joss-reviews/issues/7334</rel:inter_work_relation>
</rel:related_item>
</rel:program>
<doi_data>
<doi>10.21105/joss.07334</doi>
<resource>https://joss.theoj.org/papers/10.21105/joss.07334</resource>
<collection property="text-mining">
<item>
<resource mime_type="application/pdf">https://joss.theoj.org/papers/10.21105/joss.07334.pdf</resource>
</item>
</collection>
</doi_data>
<citation_list>
<citation key="basharin">
<article_title>On a statistical estimate for the entropy of
a sequence of independent random variables</article_title>
<author>Basharin</author>
<journal_title>Theory of Probability &amp; Its
Applications</journal_title>
<issue>3</issue>
<volume>4</volume>
<doi>10.1137/1104033</doi>
<cYear>1959</cYear>
<unstructured_citation>Basharin, G. P. (1959). On a
statistical estimate for the entropy of a sequence of independent random
variables. Theory of Probability &amp; Its Applications, 4(3), 333–336.
https://doi.org/10.1137/1104033</unstructured_citation>
</citation>
<citation key="bohme:fse:2020">
<article_title>Boosting fuzzer efficiency: An information
theoretic perspective</article_title>
<author>Böhme</author>
<journal_title>Proceedings of the 14th joint meeting of the
european software engineering conference and the ACM SIGSOFT symposium
on the foundations of software engineering</journal_title>
<doi>10.1145/3368089.3409748</doi>
<cYear>2020</cYear>
<unstructured_citation>Böhme, M., Manès, V., &amp; Cha, S.
K. (2020). Boosting fuzzer efficiency: An information theoretic
perspective. Proceedings of the 14th Joint Meeting of the European
Software Engineering Conference and the ACM SIGSOFT Symposium on the
Foundations of Software Engineering, 970–981.
https://doi.org/10.1145/3368089.3409748</unstructured_citation>
</citation>
<citation key="blackwell2025hyperfuzzing">
<article_title>Hyperfuzzing: Black-box security hypertesting
with a grey-box fuzzer</article_title>
<author>Blackwell</author>
<journal_title>Empirical Software
Engineering</journal_title>
<issue>1</issue>
<volume>30</volume>
<doi>10.1007/s10664-024-10556-3</doi>
<cYear>2025</cYear>
<unstructured_citation>Blackwell, D., Becker, I., &amp;
Clark, D. (2025). Hyperfuzzing: Black-box security hypertesting with a
grey-box fuzzer. Empirical Software Engineering, 30(1), 1–28.
https://doi.org/10.1007/s10664-024-10556-3</unstructured_citation>
</citation>
<citation key="MacKay2003">
<volume_title>Information theory, inference, and learning
algorithms</volume_title>
<author>MacKay</author>
<doi>10.1109/tit.2004.834752</doi>
<cYear>2003</cYear>
<unstructured_citation>MacKay, D. J. C. (2003). Information
theory, inference, and learning algorithms. Cambridge University Press.
https://doi.org/10.1109/tit.2004.834752</unstructured_citation>
</citation>
<citation key="Rodriguez2021EntropyEst">
<article_title>Selecting an effective entropy estimator for
short sequences of bits and bytes with maximum entropy</article_title>
<author>Contreras Rodríguez</author>
<journal_title>Entropy</journal_title>
<issue>5</issue>
<volume>23</volume>
<doi>10.3390/e23050561</doi>
<cYear>2021</cYear>
<unstructured_citation>Contreras Rodríguez, L.,
Madarro-Capó, E. J., Legón-Pérez, C. M., Rojas, O., &amp; Sosa-Gómez, G.
(2021). Selecting an effective entropy estimator for short sequences of
bits and bytes with maximum entropy. Entropy, 23(5), 561.
https://doi.org/10.3390/e23050561</unstructured_citation>
</citation>
<citation key="grassberger2008entropy">
<article_title>Entropy estimates from insufficient
samplings</article_title>
<author>Grassberger</author>
<cYear>2008</cYear>
<unstructured_citation>Grassberger, P. (2008). Entropy
estimates from insufficient samplings.
https://arxiv.org/abs/physics/0307138</unstructured_citation>
</citation>
<citation key="chaoshen">
<article_title>Nonparametric estimation of Shannon’s
diversity index when there are unseen species in sample</article_title>
<author>Chao</author>
<journal_title>Environmental and Ecological
Statistics</journal_title>
<volume>10</volume>
<doi>10.1023/A:1026096204727</doi>
<cYear>2003</cYear>
<unstructured_citation>Chao, A., &amp; Shen, T.-J. (2003).
Nonparametric estimation of Shannon’s diversity index when there are
unseen species in sample. Environmental and Ecological Statistics, 10,
429–443. https://doi.org/10.1023/A:1026096204727</unstructured_citation>
</citation>
<citation key="nemenman2002entropy">
<article_title>Entropy and inference,
revisited</article_title>
<author>Nemenman</author>
<journal_title>Proceedings of the 14th international
conference on neural information processing systems: Natural and
synthetic</journal_title>
<doi>10.7551/mitpress/1120.003.0065</doi>
<cYear>2001</cYear>
<unstructured_citation>Nemenman, I., Shafee, F., &amp;
Bialek, W. (2001). Entropy and inference, revisited. Proceedings of the
14th International Conference on Neural Information Processing Systems:
Natural and Synthetic, 471–478.
https://doi.org/10.7551/mitpress/1120.003.0065</unstructured_citation>
</citation>
<citation key="zhang">
<article_title>Entropy estimation in Turing’s
perspective</article_title>
<author>Zhang</author>
<journal_title>Neural Computation</journal_title>
<issue>5</issue>
<volume>24</volume>
<doi>10.1162/NECO_a_00266</doi>
<issn>0899-7667</issn>
<cYear>2012</cYear>
<unstructured_citation>Zhang, Z. (2012). Entropy estimation
in Turing’s perspective. Neural Computation, 24(5), 1368–1389.
https://doi.org/10.1162/NECO_a_00266</unstructured_citation>
</citation>
<citation key="Cover2006">
<volume_title>Elements of information theory 2nd edition
(Wiley series in telecommunications and signal
processing)</volume_title>
<author>Cover</author>
<isbn>0471241954</isbn>
<cYear>2006</cYear>
<unstructured_citation>Cover, T. M., &amp; Thomas, J. A.
(2006). Elements of information theory 2nd edition (Wiley series in
telecommunications and signal processing). Hardcover;
Wiley-Interscience. ISBN: 0471241954</unstructured_citation>
</citation>
<citation key="hausser2009entropy">
<article_title>Entropy inference and the James-Stein
estimator, with application to nonlinear gene association
networks</article_title>
<author>Hausser</author>
<cYear>2009</cYear>
<unstructured_citation>Hausser, J., &amp; Strimmer, K.
(2009). Entropy inference and the James-Stein estimator, with
application to nonlinear gene association networks.
https://arxiv.org/abs/0811.3579</unstructured_citation>
</citation>
<citation key="pym">
<article_title>PYM entropy estimator MATLAB reference
implementation</article_title>
<author>Pillowlab</author>
<journal_title>GitHub repository</journal_title>
<cYear>2020</cYear>
<unstructured_citation>Pillowlab. (2020). PYM entropy
estimator MATLAB reference implementation. In GitHub repository. GitHub.
https://github.com/pillowlab/PYMentropy/</unstructured_citation>
</citation>
<citation key="bub">
<article_title>BUB</article_title>
<author>Paninski</author>
<unstructured_citation>Paninski, L. BUB.
http://www.stat.columbia.edu/~liam/research/code/BUBfunc.m [Accessed
2024-10-22].</unstructured_citation>
</citation>
<citation key="nsb">
<article_title>NSB entropy estimation</article_title>
<author>Nemenman</author>
<unstructured_citation>Nemenman, I. NSB entropy estimation.
https://sourceforge.net/projects/nsb-entropy/ [Accessed
2024-10-24].</unstructured_citation>
</citation>
<citation key="unseenimp">
<article_title>Unseen</article_title>
<author>Valiant</author>
<unstructured_citation>Valiant, P., &amp; Valiant, G.
Unseen. https://theory.stanford.edu/~valiant/code.html [Accessed
2024-10-24].</unstructured_citation>
</citation>
<citation key="ndd">
<article_title>ndd - Bayesian entropy estimation from
discrete data</article_title>
<author>Marsili</author>
<journal_title>GitHub repository</journal_title>
<cYear>2021</cYear>
<unstructured_citation>Marsili, S. (2021). ndd - Bayesian
entropy estimation from discrete data. In GitHub repository. GitHub.
https://github.com/simomarsili/ndd</unstructured_citation>
</citation>
<citation key="entropart">
<article_title>entropart: An R package to measure and
partition diversity</article_title>
<author>Marcon</author>
<journal_title>Journal of Statistical
Software</journal_title>
<issue>8</issue>
<volume>67</volume>
<doi>10.18637/jss.v067.i08</doi>
<cYear>2015</cYear>
<unstructured_citation>Marcon, E., &amp; Hérault, B. (2015).
entropart: An R package to measure and partition diversity. Journal of
Statistical Software, 67(8), 1–26.
https://doi.org/10.18637/jss.v067.i08</unstructured_citation>
</citation>
</citation_list>
</journal_article>
</journal>
</body>
</doi_batch>
Binary file added joss.07334/10.21105.joss.07334.pdf
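The XML file above is a standard Crossref 5.3.1 deposit, so any namespace-aware XML parser can read the article metadata back out. Below is a minimal Python sketch using only the standard library; the local filename and variable names are illustrative assumptions, not part of the deposit.

# Minimal sketch: read the article metadata back out of the deposit.
# Assumes the XML above has been saved locally; the filename is illustrative.
import xml.etree.ElementTree as ET

# Default namespace declared at the top of the deposit.
NS = {"cr": "http://www.crossref.org/schema/5.3.1"}

tree = ET.parse("10.21105.joss.07334.crossref.xml")
article = tree.getroot().find(".//cr:journal_article", NS)

# The <title> element wraps across lines in the deposit, so normalise whitespace.
title = " ".join(article.find("cr:titles/cr:title", NS).text.split())
doi = article.find("cr:doi_data/cr:doi", NS).text
authors = [
    f'{p.find("cr:given_name", NS).text} {p.find("cr:surname", NS).text}'
    for p in article.findall("cr:contributors/cr:person_name", NS)
]

print(title)    # DiscreteEntropy.jl: Entropy Estimation of Discrete Random Variables with Julia
print(doi)      # 10.21105/joss.07334
print(authors)  # ['David A. Kelly', 'Ilaria Pia La Torre']

The same approach extends to the <citation_list> entries if you want to audit the deposited references against the paper's bibliography.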
