Merge pull request #1928 from mejrs/remove_doc_cfg
Remove doc_cfg attributes
davidhewitt authored Oct 22, 2021
2 parents fd9b0ca + dd0bf81 commit 7b7cab1
Showing 33 changed files with 72 additions and 139 deletions.
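For context: the deleted lines throughout this diff are rustdoc-only annotations that draw the "available on feature ... only" banner on docs.rs; the compile-time #[cfg(...)] gates next to them are untouched. A minimal lib.rs-shaped sketch of the pattern, assuming the usual setup where docs builds pass --cfg docsrs (the crate-root feature line and the serde_support module name are illustrative, not taken from this diff):

#![cfg_attr(docsrs, feature(doc_cfg))] // nightly-only rustdoc feature; assumed setup, not shown in this diff

// The cfg line is the real compile-time gate and survives this PR; the
// cfg_attr line only affects rendered docs and is the kind of attribute
// this commit deletes.
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub mod serde_support {
    // feature-gated conversions would live here
}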
1 change: 0 additions & 1 deletion src/buffer.rs
@@ -1,5 +1,4 @@
#![cfg(not(Py_LIMITED_API))]
#![cfg_attr(docsrs, doc(cfg(not(Py_LIMITED_API))))]
// Copyright (c) 2017 Daniel Grunwald
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
2 changes: 0 additions & 2 deletions src/class/mod.rs
@@ -7,7 +7,6 @@ mod macros;

pub mod basic;
#[cfg(not(Py_LIMITED_API))]
#[cfg_attr(docsrs, doc(cfg(not(Py_LIMITED_API))))]
pub mod buffer;
pub mod context;
pub mod descr;
@@ -24,7 +23,6 @@ pub mod sequence;

pub use self::basic::PyObjectProtocol;
#[cfg(not(Py_LIMITED_API))]
#[cfg_attr(docsrs, doc(cfg(not(Py_LIMITED_API))))]
pub use self::buffer::PyBufferProtocol;
pub use self::context::PyContextProtocol;
pub use self::descr::PyDescrProtocol;
2 changes: 1 addition & 1 deletion src/conversions/eyre.rs
@@ -1,5 +1,5 @@
#![cfg(feature = "eyre")]
#![cfg_attr(docsrs, doc(cfg(feature = "eyre")))]

//! A conversion from [eyre]’s [`Report`] type to [`PyErr`].
//!
//! Use of an error handling library like [eyre] is common in application code and when you just
1 change: 0 additions & 1 deletion src/conversions/hashbrown.rs
@@ -1,5 +1,4 @@
#![cfg(feature = "hashbrown")]
#![cfg_attr(docsrs, doc(cfg(feature = "hashbrown")))]

//! Conversions to and from [hashbrown](https://docs.rs/hashbrown/)’s
//! `HashMap` and `HashSet`.
1 change: 0 additions & 1 deletion src/conversions/indexmap.rs
@@ -1,5 +1,4 @@
#![cfg(feature = "indexmap")]
#![cfg_attr(docsrs, doc(cfg(feature = "indexmap")))]

//! Conversions to and from [indexmap](https://docs.rs/indexmap/)’s
//! `IndexMap`.
10 changes: 5 additions & 5 deletions src/conversions/num_bigint.rs
@@ -3,11 +3,6 @@
// based on Daniel Grunwald's https://github.com/dgrunwald/rust-cpython

#![cfg(all(feature = "num-bigint", not(any(Py_LIMITED_API, PyPy))))]
#![cfg_attr(
docsrs,
doc(cfg(all(feature = "num-bigint", not(any(Py_LIMITED_API, PyPy)))))
)]

//! Conversions to and from [num-bigint](https://docs.rs/num-bigint)’s [`BigInt`] and [`BigUint`] types.
//!
//! This is useful for converting Python integers when they may not fit in Rust's built-in integer types.
@@ -85,6 +80,7 @@ unsafe fn extract(ob: &PyLong, buffer: &mut [c_uchar], is_signed: c_int) -> PyRe

macro_rules! bigint_conversion {
($rust_ty: ty, $is_signed: expr, $to_bytes: path, $from_bytes: path) => {
#[cfg_attr(docsrs, doc(cfg(feature = "num-bigint")))]
impl ToPyObject for $rust_ty {
fn to_object(&self, py: Python) -> PyObject {
unsafe {
@@ -99,11 +95,15 @@
}
}
}

#[cfg_attr(docsrs, doc(cfg(feature = "num-bigint")))]
impl IntoPy<PyObject> for $rust_ty {
fn into_py(self, py: Python) -> PyObject {
self.to_object(py)
}
}

#[cfg_attr(docsrs, doc(cfg(feature = "num-bigint")))]
impl<'source> FromPyObject<'source> for $rust_ty {
fn extract(ob: &'source PyAny) -> PyResult<$rust_ty> {
let py = ob.py();
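Note on this file: the hunk counts (-85,6 +80,7 and -99,11 +95,15) suggest the docs-only attribute is being moved onto each impl generated by bigint_conversion! rather than kept at module level. A self-contained sketch of tagging every impl a macro emits (the trait, struct, and macro names below are invented, not PyO3's):

// Sketch only: stamp a docs-only cfg_attr onto each generated impl.
macro_rules! tag_impl_for_docs {
    ($ty:ty) => {
        #[cfg_attr(docsrs, doc(cfg(feature = "num-bigint")))]
        impl ExampleTrait for $ty {
            fn describe(&self) -> &'static str {
                stringify!($ty)
            }
        }
    };
}

trait ExampleTrait {
    fn describe(&self) -> &'static str;
}

struct ExampleBigInt;
tag_impl_for_docs!(ExampleBigInt);

fn main() {
    println!("{}", ExampleBigInt.describe());
}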
17 changes: 9 additions & 8 deletions src/conversions/num_complex.rs
@@ -1,5 +1,5 @@
#![cfg(feature = "num-complex")]
#![cfg_attr(docsrs, doc(cfg(feature = "num-complex")))]

//! Conversions to and from [num-complex](https://docs.rs/num-complex)’
//! [`Complex`]`<`[`f32`]`>` and [`Complex`]`<`[`f64`]`>`.
//!
@@ -117,12 +117,15 @@ impl PyComplex {

macro_rules! complex_conversion {
($float: ty) => {
#[cfg_attr(docsrs, doc(cfg(feature = "num-complex")))]
impl ToPyObject for Complex<$float> {
#[inline]
fn to_object(&self, py: Python) -> PyObject {
crate::IntoPy::<PyObject>::into_py(self.to_owned(), py)
}
}

#[cfg_attr(docsrs, doc(cfg(feature = "num-complex")))]
impl crate::IntoPy<PyObject> for Complex<$float> {
fn into_py(self, py: Python) -> PyObject {
unsafe {
@@ -132,10 +135,12 @@ macro_rules! complex_conversion {
}
}
}
#[cfg(not(any(Py_LIMITED_API, PyPy)))]

#[cfg_attr(docsrs, doc(cfg(feature = "num-complex")))]
#[allow(clippy::float_cmp)] // The comparison is for an error value
impl<'source> FromPyObject<'source> for Complex<$float> {
fn extract(obj: &'source PyAny) -> PyResult<Complex<$float>> {
#[cfg(not(any(Py_LIMITED_API, PyPy)))]
unsafe {
let val = ffi::PyComplex_AsCComplex(obj.as_ptr());
if val.real == -1.0 && PyErr::occurred(obj.py()) {
@@ -144,12 +149,8 @@
Ok(Complex::new(val.real as $float, val.imag as $float))
}
}
}
}
#[cfg(any(Py_LIMITED_API, PyPy))]
#[allow(clippy::float_cmp)] // The comparison is for an error value
impl<'source> FromPyObject<'source> for Complex<$float> {
fn extract(obj: &'source PyAny) -> PyResult<Complex<$float>> {

#[cfg(any(Py_LIMITED_API, PyPy))]
unsafe {
let ptr = obj.as_ptr();
let real = ffi::PyComplex_RealAsDouble(ptr);
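Note on this file: the two FromPyObject impls (limited-API and non-limited-API) collapse into one impl whose extract body picks a code path via cfg attributes on inner statements. A throwaway sketch of that shape, using only the Py_LIMITED_API / PyPy cfg names seen in the diff (the function and its dummy values are illustrative):

// Shape sketch only: one function, two cfg-gated paths inside the body,
// instead of two whole impl blocks each carrying its own cfg.
fn read_parts() -> (f64, f64) {
    // Stand-in for the non-limited-API fast path (PyComplex_AsCComplex).
    #[cfg(not(any(Py_LIMITED_API, PyPy)))]
    let parts = (1.0, 2.0);

    // Stand-in for the limited-API / PyPy fallback (Real/ImagAsDouble).
    #[cfg(any(Py_LIMITED_API, PyPy))]
    let parts = (1.0, 2.0);

    parts
}

fn main() {
    let (re, im) = read_parts();
    println!("real = {re}, imag = {im}");
}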
1 change: 0 additions & 1 deletion src/conversions/serde.rs
@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, doc(cfg(feature = "serde")))]
#![cfg(feature = "serde")]

//! Enables (de)serialization of [`Py`]`<T>` objects via [serde](https://docs.rs/serde).
19 changes: 18 additions & 1 deletion src/exceptions.rs
@@ -225,6 +225,22 @@ macro_rules! impl_native_exception (
)
);

#[cfg(windows)]
macro_rules! impl_windows_native_exception (
($name:ident, $exc_name:ident, $doc:expr, $layout:path) => (
#[cfg(windows)]
#[doc = $doc]
#[allow(clippy::upper_case_acronyms)]
pub struct $name($crate::PyAny);

$crate::impl_exception_boilerplate!($name);
$crate::pyobject_native_type!($name, $layout, *($crate::ffi::$exc_name as *mut $crate::ffi::PyTypeObject));
);
($name:ident, $exc_name:ident, $doc:expr) => (
impl_windows_native_exception!($name, $exc_name, $doc, $crate::ffi::PyBaseExceptionObject);
)
);

macro_rules! native_doc(
($name: literal, $alt: literal) => (
concat!(
@@ -516,8 +532,9 @@ impl_native_exception!(
native_doc!("EnvironmentError")
);
impl_native_exception!(PyIOError, PyExc_IOError, native_doc!("IOError"));

#[cfg(windows)]
impl_native_exception!(
impl_windows_native_exception!(
PyWindowsError,
PyExc_WindowsError,
native_doc!("WindowsError")
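Note on this file: instead of annotating the WindowsError exception with a docs-only attribute, the hunk introduces a dedicated impl_windows_native_exception! macro that is itself #[cfg(windows)], so everything it expands is Windows-gated at the definition site. A tiny standalone sketch of that idea (macro and type names below are invented):

// Sketch only: a macro that exists only on Windows and stamps
// #[cfg(windows)] onto whatever it defines, so call sites need no
// extra attributes.
#[cfg(windows)]
macro_rules! define_windows_marker {
    ($name:ident, $doc:expr) => {
        #[cfg(windows)]
        #[doc = $doc]
        pub struct $name;
    };
}

#[cfg(windows)]
define_windows_marker!(ExampleWindowsOnly, "Only compiled on Windows targets.");

fn main() {
    #[cfg(windows)]
    {
        let _marker = ExampleWindowsOnly;
        println!("Windows-only type was compiled");
    }
}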
1 change: 0 additions & 1 deletion src/ffi/abstract_.rs
@@ -113,7 +113,6 @@ extern "C" {
#[cfg_attr(PyPy, link_name = "PyPyIter_Next")]
pub fn PyIter_Next(arg1: *mut PyObject) -> *mut PyObject;
#[cfg(all(not(PyPy), Py_3_10))]
#[cfg_attr(docsrs, doc(cfg(all(not(PyPy), Py_3_10))))]
pub fn PyIter_Send(iter: *mut PyObject, arg: *mut PyObject, presult: *mut *mut PyObject);

#[cfg_attr(PyPy, link_name = "PyPyNumber_Check")]
6 changes: 0 additions & 6 deletions src/ffi/cpython/pystate.rs
@@ -43,17 +43,14 @@ extern "C" {
// skipped _PyThread_CurrentExceptions

#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
pub fn PyInterpreterState_Main() -> *mut PyInterpreterState;
#[cfg_attr(PyPy, link_name = "PyPyInterpreterState_Head")]
pub fn PyInterpreterState_Head() -> *mut PyInterpreterState;
#[cfg_attr(PyPy, link_name = "PyPyInterpreterState_Next")]
pub fn PyInterpreterState_Next(interp: *mut PyInterpreterState) -> *mut PyInterpreterState;
#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
pub fn PyInterpreterState_ThreadHead(interp: *mut PyInterpreterState) -> *mut PyThreadState;
#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
pub fn PyThreadState_Next(tstate: *mut PyThreadState) -> *mut PyThreadState;

#[cfg(py_sys_config = "WITH_THREAD")]
@@ -62,7 +59,6 @@ extern "C" {
}

#[cfg(Py_3_9)]
#[cfg_attr(docsrs, doc(cfg(Py_3_9)))]
pub type _PyFrameEvalFunction = extern "C" fn(
*mut crate::ffi::PyThreadState,
*mut crate::ffi::PyFrameObject,
@@ -72,13 +68,11 @@ pub type _PyFrameEvalFunction = extern "C" fn(
#[cfg(Py_3_9)]
extern "C" {
/// Get the frame evaluation function.
#[cfg_attr(docsrs, doc(cfg(Py_3_9)))]
pub fn _PyInterpreterState_GetEvalFrameFunc(
interp: *mut PyInterpreterState,
) -> _PyFrameEvalFunction;

///Set the frame evaluation function.
#[cfg_attr(docsrs, doc(cfg(Py_3_9)))]
pub fn _PyInterpreterState_SetEvalFrameFunc(
interp: *mut PyInterpreterState,
eval_frame: _PyFrameEvalFunction,
32 changes: 15 additions & 17 deletions src/ffi/cpython/unicodeobject.rs
@@ -52,7 +52,7 @@ pub struct PyASCIIObject {
///
/// In addition, they are disabled on big-endian architectures to restrict this to most "common"
/// platforms, which are at least tested on CI and appear to be sound.
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
impl PyASCIIObject {
#[inline]
pub unsafe fn interned(&self) -> c_uint {
@@ -117,7 +117,7 @@ pub const SSTATE_INTERNED_MORTAL: c_uint = 1;
pub const SSTATE_INTERNED_IMMORTAL: c_uint = 2;

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_IS_ASCII(op: *mut PyObject) -> c_uint {
debug_assert!(crate::ffi::PyUnicode_Check(op) != 0);
debug_assert!(PyUnicode_IS_READY(op) != 0);
@@ -126,13 +126,13 @@ pub unsafe fn PyUnicode_IS_ASCII(op: *mut PyObject) -> c_uint {
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_IS_COMPACT(op: *mut PyObject) -> c_uint {
(*(op as *mut PyASCIIObject)).compact()
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_IS_COMPACT_ASCII(op: *mut PyObject) -> c_uint {
if (*(op as *mut PyASCIIObject)).ascii() != 0 && PyUnicode_IS_COMPACT(op) != 0 {
1
@@ -150,25 +150,25 @@ pub const PyUnicode_2BYTE_KIND: c_uint = 2;
pub const PyUnicode_4BYTE_KIND: c_uint = 4;

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_1BYTE_DATA(op: *mut PyObject) -> *mut Py_UCS1 {
PyUnicode_DATA(op) as *mut Py_UCS1
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_2BYTE_DATA(op: *mut PyObject) -> *mut Py_UCS2 {
PyUnicode_DATA(op) as *mut Py_UCS2
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_4BYTE_DATA(op: *mut PyObject) -> *mut Py_UCS4 {
PyUnicode_DATA(op) as *mut Py_UCS4
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_KIND(op: *mut PyObject) -> c_uint {
debug_assert!(crate::ffi::PyUnicode_Check(op) != 0);
debug_assert!(PyUnicode_IS_READY(op) != 0);
@@ -177,7 +177,7 @@ pub unsafe fn PyUnicode_KIND(op: *mut PyObject) -> c_uint {
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn _PyUnicode_COMPACT_DATA(op: *mut PyObject) -> *mut c_void {
if PyUnicode_IS_ASCII(op) != 0 {
(op as *mut PyASCIIObject).offset(1) as *mut c_void
@@ -187,15 +187,15 @@ pub unsafe fn _PyUnicode_COMPACT_DATA(op: *mut PyObject) -> *mut c_void {
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn _PyUnicode_NONCOMPACT_DATA(op: *mut PyObject) -> *mut c_void {
debug_assert!(!(*(op as *mut PyUnicodeObject)).data.any.is_null());

(*(op as *mut PyUnicodeObject)).data.any
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_DATA(op: *mut PyObject) -> *mut c_void {
debug_assert!(crate::ffi::PyUnicode_Check(op) != 0);

@@ -211,7 +211,7 @@ pub unsafe fn PyUnicode_DATA(op: *mut PyObject) -> *mut c_void {
// skipped PyUnicode_READ_CHAR

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_GET_LENGTH(op: *mut PyObject) -> Py_ssize_t {
debug_assert!(crate::ffi::PyUnicode_Check(op) != 0);
debug_assert!(PyUnicode_IS_READY(op) != 0);
@@ -220,15 +220,15 @@ pub unsafe fn PyUnicode_GET_LENGTH(op: *mut PyObject) -> Py_ssize_t {
}

#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_IS_READY(op: *mut PyObject) -> c_uint {
(*(op as *mut PyASCIIObject)).ready()
}

#[cfg(not(Py_3_12))]
#[cfg_attr(Py_3_10, deprecated(note = "Python 3.10"))]
#[inline]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
pub unsafe fn PyUnicode_READY(op: *mut PyObject) -> c_int {
debug_assert!(crate::ffi::PyUnicode_Check(op) != 0);

@@ -252,7 +252,6 @@ extern "C" {
// skipped _PyUnicode_Copy

#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
pub fn PyUnicode_CopyCharacters(
to: *mut PyObject,
to_start: Py_ssize_t,
@@ -264,7 +263,6 @@ extern "C" {
// skipped _PyUnicode_FastCopyCharacters

#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
pub fn PyUnicode_Fill(
unicode: *mut PyObject,
start: Py_ssize_t,
@@ -489,7 +487,7 @@ extern "C" {
// skipped _PyUnicode_ScanIdentifier

#[cfg(test)]
#[cfg(not(target_endian = "big"))]
#[cfg(target_endian = "little")]
mod tests {
use super::*;
use crate::types::PyString;
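Note on this file: besides the doc_cfg removals, the cfg gates flip from not(target_endian = "big") to target_endian = "little". Rust's target_endian takes only the values "little" and "big", so both spellings select the same targets; the positive form just states the intent directly. A throwaway example (not from the diff) showing the two forms side by side:

// Both items below exist on exactly the same targets.
#[cfg(target_endian = "little")]
fn spelled_positively() -> &'static str {
    "cfg(target_endian = \"little\")"
}

#[cfg(not(target_endian = "big"))]
fn spelled_negatively() -> &'static str {
    "cfg(not(target_endian = \"big\"))"
}

fn main() {
    #[cfg(target_endian = "little")]
    {
        println!("{} == {}", spelled_positively(), spelled_negatively());
    }
}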
1 change: 0 additions & 1 deletion src/ffi/modsupport.rs
@@ -54,7 +54,6 @@ extern "C" {
// skipped non-limited _PyArg_Fini

#[cfg(Py_3_10)]
#[cfg_attr(docsrs, doc(cfg(Py_3_10)))]
pub fn PyModule_AddObjectRef(
module: *mut PyObject,
name: *const c_char,