// embassy/embassy-rp/src/pio.rs

use core::future::Future;
use core::marker::PhantomData;
use core::pin::Pin as FuturePin;
use core::sync::atomic::{compiler_fence, Ordering};
use core::task::{Context, Poll};
use atomic_polyfill::{AtomicU32, AtomicU8};
use embassy_cortex_m::interrupt::{Interrupt, InterruptExt};
use embassy_hal_common::{Peripheral, PeripheralRef};
use embassy_sync::waitqueue::AtomicWaker;
use pac::io::vals::Gpio0ctrlFuncsel;
use crate::dma::{Channel, Transfer, Word};
use crate::gpio::sealed::Pin as SealedPin;
use crate::gpio::{Drive, Pin, Pull, SlewRate};
use crate::pac::dma::vals::TreqSel;
use crate::pio::sealed::PioInstance as _;
use crate::{interrupt, pac, peripherals, RegExt};
struct Wakers([AtomicWaker; 12]);
impl Wakers {
#[inline(always)]
fn fifo_in(&self) -> &[AtomicWaker] {
&self.0[0..4]
}
#[inline(always)]
fn fifo_out(&self) -> &[AtomicWaker] {
&self.0[4..8]
}
#[inline(always)]
fn irq(&self) -> &[AtomicWaker] {
&self.0[8..12]
}
}
const NEW_AW: AtomicWaker = AtomicWaker::new();
const PIO_WAKERS_INIT: Wakers = Wakers([NEW_AW; 12]);
static WAKERS: [Wakers; 2] = [PIO_WAKERS_INIT; 2];
pub enum FifoJoin {
/// Both the TX and RX FIFOs are enabled
Duplex,
/// The RX FIFO is twice as deep; the TX FIFO is disabled
RxOnly,
/// The TX FIFO is twice as deep; the RX FIFO is disabled
TxOnly,
}
#[derive(PartialEq)]
pub enum ShiftDirection {
Right = 1,
Left = 0,
}
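// Bit layout of the per-PIO interrupt registers (INTR/INTE/INTS): bits 0..3
// are RXNEMPTY for SM0..SM3, bits 4..7 are TXNFULL, and bits 8..11 are the
// lower four state machine IRQ flags. The masks below select the SM0 / flag 0
// bit and get shifted left by the state machine or flag index.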
const RXNEMPTY_MASK: u32 = 1 << 0;
const TXNFULL_MASK: u32 = 1 << 4;
const SMIRQ_MASK: u32 = 1 << 8;
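// The handlers below wake every task whose status bit is set and then mask
// those sources in INTE; the futures re-enable the bits they need each time
// they are polled and find their FIFO or IRQ flag not yet ready.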
#[interrupt]
unsafe fn PIO0_IRQ_0() {
use crate::pac;
let ints = pac::PIO0.irqs(0).ints().read().0;
for bit in 0..12 {
if ints & (1 << bit) != 0 {
WAKERS[0].0[bit].wake();
}
}
pac::PIO0.irqs(0).inte().write_clear(|m| m.0 = ints);
}
#[interrupt]
unsafe fn PIO1_IRQ_0() {
use crate::pac;
let ints = pac::PIO1.irqs(0).ints().read().0;
for bit in 0..12 {
if ints & (1 << bit) != 0 {
WAKERS[1].0[bit].wake();
}
}
pac::PIO1.irqs(0).inte().write_clear(|m| m.0 = ints);
}
pub(crate) unsafe fn init() {
let irq = interrupt::PIO0_IRQ_0::steal();
irq.disable();
irq.set_priority(interrupt::Priority::P3);
pac::PIO0.irqs(0).inte().write(|m| m.0 = 0);
irq.enable();
let irq = interrupt::PIO1_IRQ_0::steal();
irq.disable();
irq.set_priority(interrupt::Priority::P3);
pac::PIO1.irqs(0).inte().write(|m| m.0 = 0);
irq.enable();
}
/// Future that waits for TX-FIFO to become writable
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct FifoOutFuture<'a, PIO: PioInstance, SM: PioStateMachine + Unpin> {
sm: &'a mut SM,
pio: PhantomData<PIO>,
value: u32,
}
impl<'a, PIO: PioInstance, SM: PioStateMachine + Unpin> FifoOutFuture<'a, PIO, SM> {
pub fn new(sm: &'a mut SM, value: u32) -> Self {
FifoOutFuture {
sm,
pio: PhantomData::default(),
value,
}
}
}
impl<'d, PIO: PioInstance, SM: PioStateMachine + Unpin> Future for FifoOutFuture<'d, PIO, SM> {
type Output = ();
fn poll(self: FuturePin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
//debug!("Poll {},{}", PIO::PIO_NO, SM);
let value = self.value;
if self.get_mut().sm.try_push_tx(value) {
Poll::Ready(())
} else {
WAKERS[PIO::PIO_NO as usize].fifo_out()[SM::SM as usize].register(cx.waker());
unsafe {
PIO::PIO.irqs(0).inte().write_set(|m| {
m.0 = TXNFULL_MASK << SM::SM;
});
}
// debug!("Pending");
Poll::Pending
}
}
}
impl<'d, PIO: PioInstance, SM: PioStateMachine + Unpin> Drop for FifoOutFuture<'d, PIO, SM> {
fn drop(&mut self) {
unsafe {
PIO::PIO.irqs(0).inte().write_clear(|m| {
m.0 = TXNFULL_MASK << SM::SM;
});
}
}
}
/// Future that waits for RX-FIFO to become readable
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct FifoInFuture<'a, PIO: PioInstance, SM: PioStateMachine> {
sm: &'a mut SM,
pio: PhantomData<PIO>,
}
impl<'a, PIO: PioInstance, SM: PioStateMachine> FifoInFuture<'a, PIO, SM> {
pub fn new(sm: &'a mut SM) -> Self {
FifoInFuture {
sm,
pio: PhantomData::default(),
}
}
}
impl<'d, PIO: PioInstance, SM: PioStateMachine> Future for FifoInFuture<'d, PIO, SM> {
type Output = u32;
fn poll(mut self: FuturePin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
//debug!("Poll {},{}", PIO::PIO_NO, SM);
if let Some(v) = self.sm.try_pull_rx() {
Poll::Ready(v)
} else {
WAKERS[PIO::PIO_NO as usize].fifo_in()[SM::SM].register(cx.waker());
unsafe {
PIO::PIO.irqs(0).inte().write_set(|m| {
m.0 = RXNEMPTY_MASK << SM::SM;
});
}
//debug!("Pending");
Poll::Pending
}
}
}
impl<'d, PIO: PioInstance, SM: PioStateMachine> Drop for FifoInFuture<'d, PIO, SM> {
fn drop(&mut self) {
unsafe {
PIO::PIO.irqs(0).inte().write_clear(|m| {
m.0 = RXNEMPTY_MASK << SM::SM;
});
}
}
}
/// Future that waits for IRQ
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct IrqFuture<PIO: PioInstance> {
pio: PhantomData<PIO>,
irq_no: u8,
}
impl<'a, PIO: PioInstance> IrqFuture<PIO> {
pub fn new(irq_no: u8) -> Self {
IrqFuture {
pio: PhantomData::default(),
irq_no,
}
}
}
impl<'d, PIO: PioInstance> Future for IrqFuture<PIO> {
type Output = ();
fn poll(self: FuturePin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
//debug!("Poll {},{}", PIO::PIO_NO, SM);
// Check if IRQ flag is already set
if critical_section::with(|_| unsafe {
let irq_flags = PIO::PIO.irq();
if irq_flags.read().0 & (1 << self.irq_no) != 0 {
irq_flags.write(|m| {
m.0 = 1 << self.irq_no;
});
true
} else {
false
}
}) {
return Poll::Ready(());
}
WAKERS[PIO::PIO_NO as usize].irq()[self.irq_no as usize].register(cx.waker());
unsafe {
PIO::PIO.irqs(0).inte().write_set(|m| {
m.0 = SMIRQ_MASK << self.irq_no;
});
}
Poll::Pending
}
}
impl<'d, PIO: PioInstance> Drop for IrqFuture<PIO> {
fn drop(&mut self) {
unsafe {
PIO::PIO.irqs(0).inte().write_clear(|m| {
m.0 = SMIRQ_MASK << self.irq_no;
});
}
}
}
pub struct PioPin<PIO: PioInstance> {
pin_bank: u8,
pio: PhantomData<PIO>,
}
impl<PIO: PioInstance> PioPin<PIO> {
/// Set the pin's drive strength.
#[inline]
pub fn set_drive_strength(&mut self, strength: Drive) {
unsafe {
self.pad_ctrl().modify(|w| {
w.set_drive(match strength {
Drive::_2mA => pac::pads::vals::Drive::_2MA,
Drive::_4mA => pac::pads::vals::Drive::_4MA,
Drive::_8mA => pac::pads::vals::Drive::_8MA,
Drive::_12mA => pac::pads::vals::Drive::_12MA,
});
});
}
}
/// Set the pin's slew rate.
#[inline]
pub fn set_slew_rate(&mut self, slew_rate: SlewRate) {
unsafe {
self.pad_ctrl().modify(|w| {
w.set_slewfast(slew_rate == SlewRate::Fast);
});
}
}
/// Set the pin's pull.
#[inline]
pub fn set_pull(&mut self, pull: Pull) {
unsafe {
self.pad_ctrl().modify(|w| {
w.set_pue(pull == Pull::Up);
w.set_pde(pull == Pull::Down);
});
}
}
/// Set the pin's schmitt trigger.
#[inline]
pub fn set_schmitt(&mut self, enable: bool) {
unsafe {
self.pad_ctrl().modify(|w| {
w.set_schmitt(enable);
});
}
}
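/// Bypass (or re-enable) the input synchronizer for this pin. The
/// synchronizer adds two system-clock cycles of input latency; bypassing it
/// is only safe for signals that are already synchronous to the system clock.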
pub fn set_input_sync_bypass<'a>(&mut self, bypass: bool) {
let mask = 1 << self.pin();
unsafe {
if bypass {
PIO::PIO.input_sync_bypass().write_set(|w| *w = mask);
} else {
PIO::PIO.input_sync_bypass().write_clear(|w| *w = mask);
}
}
}
pub fn pin(&self) -> u8 {
self._pin()
}
}
impl<PIO: PioInstance> SealedPin for PioPin<PIO> {
fn pin_bank(&self) -> u8 {
self.pin_bank
}
}
pub struct PioStateMachineInstance<'d, PIO: PioInstance, const SM: usize> {
pio: PhantomData<&'d PIO>,
}
impl<'d, PIO: PioInstance, const SM: usize> Drop for PioStateMachineInstance<'d, PIO, SM> {
fn drop(&mut self) {
unsafe {
PIO::PIO.ctrl().write_clear(|w| w.set_sm_enable(1 << SM));
}
on_pio_drop::<PIO>();
}
}
impl<'d, PIO: PioInstance, const SM: usize> sealed::PioStateMachine for PioStateMachineInstance<'d, PIO, SM> {
type Pio = PIO;
const SM: usize = SM;
}
impl<'d, PIO: PioInstance, const SM: usize> PioStateMachine for PioStateMachineInstance<'d, PIO, SM> {}
pub trait PioStateMachine: sealed::PioStateMachine + Sized + Unpin {
fn pio_no(&self) -> u8 {
Self::Pio::PIO_NO
}
fn sm_no(&self) -> u8 {
Self::SM as u8
}
fn restart(&mut self) {
let mask = 1u8 << Self::SM;
unsafe {
Self::Pio::PIO.ctrl().write_set(|w| w.set_sm_restart(mask));
}
}
fn set_enable(&mut self, enable: bool) {
let mask = 1u8 << Self::SM;
unsafe {
if enable {
Self::Pio::PIO.ctrl().write_set(|w| w.set_sm_enable(mask));
} else {
Self::Pio::PIO.ctrl().write_clear(|w| w.set_sm_enable(mask));
}
}
}
fn is_enabled(&self) -> bool {
unsafe { Self::Pio::PIO.ctrl().read().sm_enable() & (1u8 << Self::SM) != 0 }
}
fn is_tx_empty(&self) -> bool {
unsafe { Self::Pio::PIO.fstat().read().txempty() & (1u8 << Self::SM) != 0 }
}
fn is_tx_full(&self) -> bool {
unsafe { Self::Pio::PIO.fstat().read().txfull() & (1u8 << Self::SM) != 0 }
}
fn is_rx_empty(&self) -> bool {
unsafe { Self::Pio::PIO.fstat().read().rxempty() & (1u8 << Self::SM) != 0 }
}
fn is_rx_full(&self) -> bool {
unsafe { Self::Pio::PIO.fstat().read().rxfull() & (1u8 << Self::SM) != 0 }
}
fn tx_level(&self) -> u8 {
unsafe {
let flevel = Self::Pio::PIO.flevel().read().0;
(flevel >> (Self::SM * 8)) as u8 & 0x0f
}
}
fn rx_level(&self) -> u8 {
unsafe {
let flevel = Self::Pio::PIO.flevel().read().0;
(flevel >> (Self::SM * 8 + 4)) as u8 & 0x0f
}
}
fn push_tx(&mut self, v: u32) {
unsafe {
Self::Pio::PIO.txf(Self::SM).write_value(v);
}
}
fn try_push_tx(&mut self, v: u32) -> bool {
if self.is_tx_full() {
return false;
}
self.push_tx(v);
true
}
fn pull_rx(&mut self) -> u32 {
unsafe { Self::Pio::PIO.rxf(Self::SM).read() }
}
fn try_pull_rx(&mut self) -> Option<u32> {
if self.is_rx_empty() {
return None;
}
Some(self.pull_rx())
}
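/// Set the state machine clock divider from a fixed-point value with 8
/// fractional bits, i.e. `div_x_256 = divider * 256`. The value is shifted
/// into the upper 24 bits of CLKDIV (16-bit integer part, 8-bit fraction).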
fn set_clkdiv(&mut self, div_x_256: u32) {
unsafe {
Self::this_sm().clkdiv().write(|w| w.0 = div_x_256 << 8);
}
}
fn get_clkdiv(&self) -> u32 {
unsafe { Self::this_sm().clkdiv().read().0 >> 8 }
}
fn clkdiv_restart(&mut self) {
let mask = 1u8 << Self::SM;
unsafe {
Self::Pio::PIO.ctrl().write_set(|w| w.set_clkdiv_restart(mask));
}
}
fn set_side_enable(&self, enable: bool) {
unsafe {
Self::this_sm().execctrl().modify(|w| w.set_side_en(enable));
}
}
fn is_side_enabled(&self) -> bool {
unsafe { Self::this_sm().execctrl().read().side_en() }
}
fn set_side_pindir(&mut self, pindir: bool) {
unsafe {
Self::this_sm().execctrl().modify(|w| w.set_side_pindir(pindir));
}
}
fn is_side_pindir(&self) -> bool {
unsafe { Self::this_sm().execctrl().read().side_pindir() }
}
fn set_jmp_pin(&mut self, pin: u8) {
unsafe {
Self::this_sm().execctrl().modify(|w| w.set_jmp_pin(pin));
}
}
fn get_jmp_pin(&mut self) -> u8 {
unsafe { Self::this_sm().execctrl().read().jmp_pin() }
}
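/// Set the wrap addresses: after executing the instruction at `source`
/// (EXECCTRL.WRAP_TOP), execution continues at `target`
/// (EXECCTRL.WRAP_BOTTOM), unless that instruction is a taken jump.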
fn set_wrap(&self, source: u8, target: u8) {
unsafe {
Self::this_sm().execctrl().modify(|w| {
w.set_wrap_top(source);
w.set_wrap_bottom(target)
});
}
}
/// Get wrapping addresses. Returns (source, target).
fn get_wrap(&self) -> (u8, u8) {
unsafe {
let r = Self::this_sm().execctrl().read();
(r.wrap_top(), r.wrap_bottom())
}
}
fn set_fifo_join(&mut self, join: FifoJoin) {
let (rx, tx) = match join {
FifoJoin::Duplex => (false, false),
FifoJoin::RxOnly => (true, false),
FifoJoin::TxOnly => (false, true),
};
unsafe {
Self::this_sm().shiftctrl().modify(|w| {
w.set_fjoin_rx(rx);
w.set_fjoin_tx(tx)
});
}
}
fn get_fifo_join(&self) -> FifoJoin {
unsafe {
let r = Self::this_sm().shiftctrl().read();
// Ignores the invalid state when both bits are set
if r.fjoin_rx() {
FifoJoin::RxOnly
} else if r.fjoin_tx() {
FifoJoin::TxOnly
} else {
FifoJoin::Duplex
}
}
}
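/// Drain both FIFOs by toggling FJOIN_RX twice: any change to the FIFO join
/// bits clears the FIFO state, and toggling twice leaves the original join
/// configuration in place.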
fn clear_fifos(&mut self) {
// Toggle FJOIN_RX to flush FIFOs
unsafe {
let shiftctrl = Self::this_sm().shiftctrl();
shiftctrl.modify(|w| {
w.set_fjoin_rx(!w.fjoin_rx());
});
shiftctrl.modify(|w| {
w.set_fjoin_rx(!w.fjoin_rx());
});
}
}
fn set_pull_threshold(&mut self, threshold: u8) {
unsafe {
Self::this_sm().shiftctrl().modify(|w| w.set_pull_thresh(threshold));
}
}
fn get_pull_threshold(&self) -> u8 {
unsafe { Self::this_sm().shiftctrl().read().pull_thresh() }
}
fn set_push_threshold(&mut self, threshold: u8) {
unsafe {
Self::this_sm().shiftctrl().modify(|w| w.set_push_thresh(threshold));
}
}
fn get_push_threshold(&self) -> u8 {
unsafe { Self::this_sm().shiftctrl().read().push_thresh() }
}
fn set_out_shift_dir(&mut self, dir: ShiftDirection) {
unsafe {
Self::this_sm()
.shiftctrl()
.modify(|w| w.set_out_shiftdir(dir == ShiftDirection::Right));
}
}
fn get_out_shiftdir(&self) -> ShiftDirection {
unsafe {
if Self::this_sm().shiftctrl().read().out_shiftdir() {
ShiftDirection::Right
} else {
ShiftDirection::Left
}
}
}
fn set_in_shift_dir(&mut self, dir: ShiftDirection) {
unsafe {
Self::this_sm()
.shiftctrl()
.modify(|w| w.set_in_shiftdir(dir == ShiftDirection::Right));
}
}
fn get_in_shiftdir(&self) -> ShiftDirection {
unsafe {
if Self::this_sm().shiftctrl().read().in_shiftdir() {
ShiftDirection::Right
} else {
ShiftDirection::Left
}
}
}
fn set_autopull(&mut self, auto: bool) {
unsafe {
Self::this_sm().shiftctrl().modify(|w| w.set_autopull(auto));
}
}
fn is_autopull(&self) -> bool {
unsafe { Self::this_sm().shiftctrl().read().autopull() }
}
fn set_autopush(&mut self, auto: bool) {
unsafe {
Self::this_sm().shiftctrl().modify(|w| w.set_autopush(auto));
}
}
fn is_autopush(&self) -> bool {
unsafe { Self::this_sm().shiftctrl().read().autopush() }
}
fn get_addr(&self) -> u8 {
unsafe { Self::this_sm().addr().read().addr() }
}
fn set_sideset_count(&mut self, count: u8) {
unsafe {
Self::this_sm().pinctrl().modify(|w| w.set_sideset_count(count));
}
}
fn get_sideset_count(&self) -> u8 {
unsafe { Self::this_sm().pinctrl().read().sideset_count() }
}
fn set_sideset_base_pin(&mut self, base_pin: &PioPin<Self::Pio>) {
unsafe {
Self::this_sm().pinctrl().modify(|w| w.set_sideset_base(base_pin.pin()));
}
}
fn get_sideset_base(&self) -> u8 {
unsafe {
let r = Self::this_sm().pinctrl().read();
r.sideset_base()
}
}
/// Set the range of pins affected by SET instructions.
fn set_set_range(&mut self, base: u8, count: u8) {
assert!(base + count < 32);
unsafe {
Self::this_sm().pinctrl().modify(|w| {
w.set_set_base(base);
w.set_set_count(count)
});
}
}
/// Get the range of pins affected by SET instructions. Returns (base, count).
fn get_set_range(&self) -> (u8, u8) {
unsafe {
let r = Self::this_sm().pinctrl().read();
(r.set_base(), r.set_count())
}
}
fn set_in_base_pin(&mut self, base: &PioPin<Self::Pio>) {
unsafe {
Self::this_sm().pinctrl().modify(|w| w.set_in_base(base.pin()));
}
}
fn get_in_base(&self) -> u8 {
unsafe {
let r = Self::this_sm().pinctrl().read();
r.in_base()
}
}
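/// Set the range of pins affected by OUT instructions.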
fn set_out_range(&mut self, base: u8, count: u8) {
assert!(base + count < 32);
unsafe {
Self::this_sm().pinctrl().modify(|w| {
w.set_out_base(base);
w.set_out_count(count)
});
}
}
/// Get the range of pins affected by OUT instructions. Returns (base, count).
fn get_out_range(&self) -> (u8, u8) {
unsafe {
let r = Self::this_sm().pinctrl().read();
(r.out_base(), r.out_count())
}
}
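/// Set the OUT pin range from a slice of pins. The pins must be consecutive
/// (this is asserted) because the hardware only stores a base pin and a count.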
fn set_out_pins<'a, 'b: 'a>(&'a mut self, pins: &'b [&PioPin<Self::Pio>]) {
let count = pins.len();
assert!(count >= 1);
let start = pins[0].pin() as usize;
assert!(start + pins.len() <= 32);
for i in 0..count {
assert!(pins[i].pin() as usize == start + i, "Pins must be sequential");
}
self.set_out_range(start as u8, count as u8);
}
fn set_set_pins<'a, 'b: 'a>(&'a mut self, pins: &'b [&PioPin<Self::Pio>]) {
let count = pins.len();
assert!(count >= 1);
let start = pins[0].pin() as usize;
assert!(start + pins.len() <= 32);
for i in 0..count {
assert!(pins[i].pin() as usize == start + i, "Pins must be sequential");
}
self.set_set_range(start as u8, count as u8);
}
fn get_current_instr() -> u32 {
unsafe { Self::this_sm().instr().read().0 }
}
fn exec_instr(&mut self, instr: u16) {
unsafe {
Self::this_sm().instr().write(|w| w.set_instr(instr));
}
}
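/// Return a future that completes once `value` has been pushed to the TX
/// FIFO, waiting on the TXNFULL interrupt while the FIFO is full.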
fn wait_push<'a>(&'a mut self, value: u32) -> FifoOutFuture<'a, Self::Pio, Self> {
FifoOutFuture::new(self, value)
}
fn wait_pull<'a>(&'a mut self) -> FifoInFuture<'a, Self::Pio, Self> {
FifoInFuture::new(self)
}
fn wait_irq(&self, irq_no: u8) -> IrqFuture<Self::Pio> {
IrqFuture::new(irq_no)
}
fn has_tx_stalled(&self) -> bool {
unsafe {
let fdebug = Self::Pio::PIO.fdebug();
let ret = fdebug.read().txstall() & (1 << Self::SM) != 0;
fdebug.write(|w| w.set_txstall(1 << Self::SM));
ret
}
}
fn has_tx_overflowed(&self) -> bool {
unsafe {
let fdebug = Self::Pio::PIO.fdebug();
let ret = fdebug.read().txover() & (1 << Self::SM) != 0;
fdebug.write(|w| w.set_txover(1 << Self::SM));
ret
}
}
fn has_rx_stalled(&self) -> bool {
unsafe {
let fdebug = Self::Pio::PIO.fdebug();
let ret = fdebug.read().rxstall() & (1 << Self::SM) != 0;
fdebug.write(|w| w.set_rxstall(1 << Self::SM));
ret
}
}
fn has_rx_underflowed(&self) -> bool {
unsafe {
let fdebug = Self::Pio::PIO.fdebug();
let ret = fdebug.read().rxunder() & (1 << Self::SM) != 0;
fdebug.write(|w| w.set_rxunder(1 << Self::SM));
ret
}
}
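/// Start a DMA transfer from `data` into this state machine's TX FIFO, paced
/// by the TX DREQ. The returned [`Transfer`] completes once the whole slice
/// has been consumed by the DMA channel.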
fn dma_push<'a, C: Channel, W: Word>(&'a self, ch: PeripheralRef<'a, C>, data: &'a [W]) -> Transfer<'a, C> {
unsafe {
let pio_no = Self::Pio::PIO_NO;
let sm_no = Self::SM;
let p = ch.regs();
p.read_addr().write_value(data.as_ptr() as u32);
p.write_addr().write_value(Self::Pio::PIO.txf(sm_no).ptr() as u32);
p.trans_count().write_value(data.len() as u32);
compiler_fence(Ordering::SeqCst);
p.ctrl_trig().write(|w| {
// Select the TX DREQ for this state machine
w.set_treq_sel(TreqSel(pio_no * 8 + sm_no as u8));
w.set_data_size(W::size());
w.set_chain_to(ch.number());
w.set_incr_read(true);
w.set_incr_write(false);
w.set_en(true);
});
compiler_fence(Ordering::SeqCst);
}
Transfer::new(ch)
}
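/// Start a DMA transfer from this state machine's RX FIFO into `data`, paced
/// by the RX DREQ. The returned [`Transfer`] completes once the whole slice
/// has been filled.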
fn dma_pull<'a, C: Channel, W: Word>(&'a self, ch: PeripheralRef<'a, C>, data: &'a mut [W]) -> Transfer<'a, C> {
unsafe {
let pio_no = Self::Pio::PIO_NO;
let sm_no = Self::SM;
let p = ch.regs();
p.write_addr().write_value(data.as_ptr() as u32);
p.read_addr().write_value(Self::Pio::PIO.rxf(sm_no).ptr() as u32);
p.trans_count().write_value(data.len() as u32);
compiler_fence(Ordering::SeqCst);
p.ctrl_trig().write(|w| {
// Select the RX DREQ for this state machine
w.set_treq_sel(TreqSel(pio_no * 8 + sm_no as u8 + 4));
w.set_data_size(W::size());
w.set_chain_to(ch.number());
w.set_incr_read(false);
w.set_incr_write(true);
w.set_en(true);
});
compiler_fence(Ordering::SeqCst);
}
Transfer::new(ch)
}
}
pub struct PioCommon<'d, PIO: PioInstance> {
instructions_used: u32,
pio: PhantomData<&'d PIO>,
}
impl<'d, PIO: PioInstance> Drop for PioCommon<'d, PIO> {
fn drop(&mut self) {
on_pio_drop::<PIO>();
}
}
pub struct PioInstanceMemory<'d, PIO: PioInstance> {
used_mask: u32,
pio: PhantomData<&'d PIO>,
}
impl<'d, PIO: PioInstance> PioCommon<'d, PIO> {
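/// Write a sequence of instructions into the shared 32-slot instruction
/// memory, starting at absolute offset `start`. Panics if any targeted slot
/// is already in use. The returned [`PioInstanceMemory`] records the occupied
/// slots so they can later be released with [`Self::free_instr`].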
pub fn write_instr<I>(&mut self, start: usize, instrs: I) -> PioInstanceMemory<'d, PIO>
where
I: Iterator<Item = u16>,
{
let mut used_mask = 0;
for (i, instr) in instrs.enumerate() {
let addr = (i + start) as u8;
let mask = 1 << (addr as usize);
assert!(
self.instructions_used & mask == 0,
"Trying to write already used PIO instruction memory at {}",
addr
);
unsafe {
PIO::PIO.instr_mem(addr as usize).write(|w| {
w.set_instr_mem(instr);
});
}
used_mask |= mask;
}
self.instructions_used |= used_mask;
PioInstanceMemory {
used_mask,
pio: PhantomData,
}
}
/// Free instruction memory previously allocated with [`PioCommon::write_instr`].
/// This is always possible but unsafe if any state machine is still using this
/// bit of memory.
pub unsafe fn free_instr(&mut self, instrs: PioInstanceMemory<PIO>) {
self.instructions_used &= !instrs.used_mask;
}
pub fn is_irq_set(&self, irq_no: u8) -> bool {
assert!(irq_no < 8);
unsafe {
let irq_flags = PIO::PIO.irq();
irq_flags.read().0 & (1 << irq_no) != 0
}
}
pub fn clear_irq(&mut self, irq_no: usize) {
assert!(irq_no < 8);
unsafe { PIO::PIO.irq().write(|w| w.set_irq(1 << irq_no)) }
}
pub fn clear_irqs(&mut self, mask: u8) {
unsafe { PIO::PIO.irq().write(|w| w.set_irq(mask)) }
}
pub fn force_irq(&mut self, irq_no: usize) {
assert!(irq_no < 8);
unsafe { PIO::PIO.irq_force().write(|w| w.set_irq_force(1 << irq_no)) }
}
pub fn set_input_sync_bypass<'a>(&'a mut self, bypass: u32, mask: u32) {
unsafe {
// This is done as two separate set/clear writes, which can briefly interfere
// with the per-pin bypass setters; that is fine, since nothing relying on
// the bypass bits can reasonably run before both writes have finished.
PIO::PIO.input_sync_bypass().write_set(|w| *w = mask & bypass);
PIO::PIO.input_sync_bypass().write_clear(|w| *w = mask & !bypass);
}
}
pub fn get_input_sync_bypass(&self) -> u32 {
unsafe { PIO::PIO.input_sync_bypass().read() }
}
/// Register a pin for PIO usage. Pins will be released from the PIO block
/// (i.e., have their `FUNCSEL` reset to `NULL`) when the [`PioCommon`] *and*
/// all [`PioStateMachine`]s for this block have been dropped.
pub fn make_pio_pin(&mut self, pin: impl Pin) -> PioPin<PIO> {
unsafe {
pin.io().ctrl().write(|w| w.set_funcsel(PIO::FUNCSEL.0));
}
// we can be relaxed about this because we're &mut here and nothing is cached
PIO::state().used_pins.fetch_or(1 << pin.pin_bank(), Ordering::Relaxed);
PioPin {
pin_bank: pin.pin_bank(),
pio: PhantomData::default(),
}
}
}
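/// Handle to a PIO block, split into the shared [`PioCommon`] part and its
/// four state machines.
///
/// A minimal usage sketch (assuming the usual `embassy_rp::init` entry point;
/// not compiled as a doctest):
///
/// ```ignore
/// let p = embassy_rp::init(Default::default());
/// let Pio { mut common, mut sm0, .. } = Pio::new(p.PIO0);
/// ```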
pub struct Pio<'d, PIO: PioInstance> {
pub common: PioCommon<'d, PIO>,
pub sm0: PioStateMachineInstance<'d, PIO, 0>,
pub sm1: PioStateMachineInstance<'d, PIO, 1>,
pub sm2: PioStateMachineInstance<'d, PIO, 2>,
pub sm3: PioStateMachineInstance<'d, PIO, 3>,
}
impl<'d, PIO: PioInstance> Pio<'d, PIO> {
pub fn new(_pio: impl Peripheral<P = PIO> + 'd) -> Self {
PIO::state().users.store(5, Ordering::Release);
PIO::state().used_pins.store(0, Ordering::Release);
Self {
common: PioCommon {
instructions_used: 0,
pio: PhantomData,
},
sm0: PioStateMachineInstance { pio: PhantomData },
sm1: PioStateMachineInstance { pio: PhantomData },
sm2: PioStateMachineInstance { pio: PhantomData },
sm3: PioStateMachineInstance { pio: PhantomData },
}
}
}
// We need to keep a record of which pins are assigned to each PIO. make_pio_pin
// notionally takes ownership of the pin it is given, but the wrapped pin cannot
// be treated as an owned resource, since dropping it would have to deconfigure
// the pin, breaking running state machines in the process. Pins are also shared
// between all state machines, which makes ownership even messier to track any
// other way.
pub struct State {
users: AtomicU8,
used_pins: AtomicU32,
}
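// Called from the Drop impls of PioCommon and each state machine instance.
// `users` starts at 5 (common + 4 SMs), so the last drop resets the FUNCSEL
// of every pin registered through make_pio_pin back to NULL.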
fn on_pio_drop<PIO: PioInstance>() {
let state = PIO::state();
if state.users.fetch_sub(1, Ordering::AcqRel) == 1 {
let used_pins = state.used_pins.load(Ordering::Relaxed);
let null = Gpio0ctrlFuncsel::NULL.0;
for i in 0..32 {
if used_pins & (1 << i) != 0 {
unsafe {
pac::IO_BANK0.gpio(i).ctrl().write(|w| w.set_funcsel(null));
}
}
}
}
}
mod sealed {
use super::*;
pub trait PioStateMachine {
type Pio: super::PioInstance;
const SM: usize;
#[inline(always)]
fn this_sm() -> crate::pac::pio::StateMachine {
Self::Pio::PIO.sm(Self::SM as usize)
}
}
pub trait PioInstance {
const PIO_NO: u8;
const PIO: &'static crate::pac::pio::Pio;
const FUNCSEL: crate::pac::io::vals::Gpio0ctrlFuncsel;
#[inline]
fn state() -> &'static State {
static STATE: State = State {
users: AtomicU8::new(0),
used_pins: AtomicU32::new(0),
};
&STATE
}
}
}
pub trait PioInstance: sealed::PioInstance + Sized + Unpin {
fn pio(&self) -> u8 {
Self::PIO_NO
}
}
macro_rules! impl_pio {
($name:ident, $pio:expr, $pac:ident, $funcsel:ident) => {
impl sealed::PioInstance for peripherals::$name {
const PIO_NO: u8 = $pio;
const PIO: &'static pac::pio::Pio = &pac::$pac;
const FUNCSEL: pac::io::vals::Gpio0ctrlFuncsel = pac::io::vals::Gpio0ctrlFuncsel::$funcsel;
}
impl PioInstance for peripherals::$name {}
};
}
impl_pio!(PIO0, 0, PIO0, PIO0_0);
impl_pio!(PIO1, 1, PIO1, PIO1_0);