From 934a17c532175ab4c476f0ec4d4f0e744faf8ecf Mon Sep 17 00:00:00 2001 From: Aniket Singh Yadav Date: Sun, 15 Mar 2026 18:45:10 +0000 Subject: [PATCH 1/4] add sphinx-tags support and tag documentation, tutorials, and examples --- doc/conf.py | 9 +++++++++ doc/development/contributing.rst | 2 ++ doc/development/index.rst | 2 ++ doc/development/roadmap.rst | 2 ++ doc/development/whats_new.rst | 2 ++ doc/documentation/cite.rst | 2 ++ doc/documentation/cited.rst | 2 ++ doc/documentation/cookbook.rst | 2 ++ doc/documentation/datasets.rst | 2 ++ doc/documentation/design_philosophy.rst | 2 ++ doc/documentation/glossary.rst | 2 ++ doc/documentation/implementation.rst | 2 ++ doc/documentation/index.rst | 2 ++ doc/help/faq.rst | 4 +++- doc/help/index.rst | 2 ++ doc/help/learn_python.rst | 2 ++ doc/help/migrating.rst | 2 ++ doc/index.rst | 1 + doc/install/advanced.rst | 2 ++ doc/install/check_installation.rst | 2 ++ doc/install/freesurfer.rst | 2 ++ doc/install/ides.rst | 2 ++ doc/install/index.rst | 2 ++ doc/install/installers.rst | 2 ++ doc/install/manual_install.rst | 2 ++ doc/install/manual_install_python.rst | 2 ++ doc/install/mne_c.rst | 2 ++ doc/install/mne_tools_suite.rst | 2 ++ doc/install/updating.rst | 2 ++ doc/overview/people.rst | 2 ++ examples/datasets/brainstorm_data.py | 2 ++ examples/datasets/hf_sef_data.py | 2 ++ examples/datasets/kernel_phantom.py | 2 ++ examples/datasets/limo_data.py | 2 ++ examples/datasets/opm_data.py | 2 ++ examples/datasets/spm_faces_dataset.py | 2 ++ examples/decoding/decoding_csp_eeg.py | 2 ++ examples/decoding/decoding_csp_timefreq.py | 2 ++ examples/decoding/decoding_rsa_sgskip.py | 2 ++ examples/decoding/decoding_spatio_temporal_source.py | 2 ++ examples/decoding/decoding_spoc_CMC.py | 2 ++ .../decoding/decoding_time_generalization_conditions.py | 2 ++ .../decoding/decoding_unsupervised_spatial_filter.py | 2 ++ examples/decoding/decoding_xdawn_eeg.py | 2 ++ examples/decoding/ems_filtering.py | 2 ++ 
examples/decoding/linear_model_patterns.py | 2 ++ examples/decoding/receptive_field_mtrf.py | 2 ++ examples/decoding/ssd_spatial_filters.py | 2 ++ examples/forward/forward_sensitivity_maps.py | 2 ++ examples/forward/left_cerebellum_volume_source.py | 2 ++ examples/forward/source_space_morphing.py | 2 ++ examples/inverse/compute_mne_inverse_epochs_in_label.py | 2 ++ examples/inverse/compute_mne_inverse_raw_in_label.py | 2 ++ examples/inverse/compute_mne_inverse_volume.py | 2 ++ examples/inverse/custom_inverse_solver.py | 2 ++ examples/inverse/dics_epochs.py | 2 ++ examples/inverse/dics_source_power.py | 2 ++ examples/inverse/evoked_ers_source_power.py | 2 ++ examples/inverse/gamma_map_inverse.py | 2 ++ examples/inverse/label_activation_from_stc.py | 2 ++ examples/inverse/label_from_stc.py | 2 ++ examples/inverse/label_source_activations.py | 2 ++ examples/inverse/mixed_norm_inverse.py | 2 ++ examples/inverse/mixed_source_space_inverse.py | 2 ++ examples/inverse/mne_cov_power.py | 2 ++ examples/inverse/morph_surface_stc.py | 2 ++ examples/inverse/morph_volume_stc.py | 2 ++ examples/inverse/multi_dipole_model.py | 2 ++ examples/inverse/multidict_reweighted_tfmxne.py | 2 ++ examples/inverse/psf_ctf_label_leakage.py | 2 ++ examples/inverse/psf_ctf_vertices.py | 2 ++ examples/inverse/psf_ctf_vertices_lcmv.py | 2 ++ examples/inverse/psf_volume.py | 2 ++ examples/inverse/rap_music.py | 2 ++ examples/inverse/read_inverse.py | 2 ++ examples/inverse/read_stc.py | 2 ++ examples/inverse/resolution_metrics.py | 2 ++ examples/inverse/resolution_metrics_eegmeg.py | 2 ++ examples/inverse/snr_estimate.py | 2 ++ examples/inverse/source_space_snr.py | 2 ++ examples/inverse/time_frequency_mixed_norm_inverse.py | 2 ++ examples/inverse/trap_music.py | 2 ++ examples/inverse/vector_mne_solution.py | 2 ++ examples/io/elekta_epochs.py | 2 ++ examples/io/read_impedances.py | 2 ++ examples/io/read_neo_format.py | 2 ++ examples/io/read_noise_covariance_matrix.py | 2 ++ examples/io/read_xdf.py | 
2 ++ examples/preprocessing/contralateral_referencing.py | 2 ++ examples/preprocessing/css.py | 2 ++ examples/preprocessing/define_target_events.py | 2 ++ examples/preprocessing/eeg_bridging.py | 2 ++ examples/preprocessing/eeg_csd.py | 2 ++ examples/preprocessing/eog_artifact_histogram.py | 2 ++ examples/preprocessing/eog_regression.py | 2 ++ examples/preprocessing/epochs_metadata.py | 2 ++ examples/preprocessing/esg_rm_heart_artefact_pcaobs.py | 2 ++ examples/preprocessing/find_ref_artifacts.py | 2 ++ examples/preprocessing/fnirs_artifact_removal.py | 2 ++ examples/preprocessing/ica_comparison.py | 2 ++ examples/preprocessing/interpolate_bad_channels.py | 2 ++ examples/preprocessing/interpolate_to.py | 2 ++ examples/preprocessing/movement_compensation.py | 2 ++ examples/preprocessing/movement_detection.py | 2 ++ examples/preprocessing/muscle_detection.py | 2 ++ examples/preprocessing/muscle_ica.py | 2 ++ examples/preprocessing/otp.py | 2 ++ examples/preprocessing/shift_evoked.py | 2 ++ examples/preprocessing/virtual_evoked.py | 2 ++ examples/preprocessing/xdawn_denoising.py | 2 ++ examples/simulation/plot_stc_metrics.py | 2 ++ examples/simulation/simulate_evoked_data.py | 2 ++ examples/simulation/simulate_raw_data.py | 2 ++ .../simulated_raw_data_using_subject_anatomy.py | 2 ++ examples/simulation/source_simulator.py | 2 ++ examples/stats/cluster_stats_evoked.py | 2 ++ examples/stats/fdr_stats_evoked.py | 2 ++ examples/stats/linear_regression_raw.py | 2 ++ examples/stats/sensor_permutation_test.py | 2 ++ examples/stats/sensor_regression.py | 2 ++ examples/time_frequency/compute_csd.py | 2 ++ examples/time_frequency/compute_source_psd_epochs.py | 2 ++ examples/time_frequency/source_label_time_frequency.py | 2 ++ examples/time_frequency/source_power_spectrum.py | 2 ++ examples/time_frequency/source_power_spectrum_opm.py | 2 ++ examples/time_frequency/source_space_time_frequency.py | 2 ++ examples/time_frequency/temporal_whitening.py | 2 ++ 
examples/time_frequency/time_frequency_erds.py | 2 ++ .../time_frequency/time_frequency_global_field_power.py | 2 ++ examples/time_frequency/time_frequency_simulated.py | 2 ++ examples/visualization/3d_to_2d.py | 2 ++ examples/visualization/brain.py | 2 ++ examples/visualization/channel_epochs_image.py | 2 ++ examples/visualization/eeg_on_scalp.py | 2 ++ examples/visualization/evoked_arrowmap.py | 2 ++ examples/visualization/evoked_topomap.py | 2 ++ examples/visualization/evoked_whitening.py | 2 ++ examples/visualization/eyetracking_plot_heatmap.py | 2 ++ examples/visualization/meg_sensors.py | 2 ++ examples/visualization/mne_helmet.py | 2 ++ examples/visualization/parcellation.py | 2 ++ examples/visualization/roi_erpimage_by_rt.py | 2 ++ examples/visualization/ssp_projs_sensitivity_map.py | 2 ++ examples/visualization/topo_compare_conditions.py | 2 ++ examples/visualization/topo_customized.py | 2 ++ examples/visualization/xhemi.py | 2 ++ pyproject.toml | 2 ++ tutorials/clinical/20_seeg.py | 2 ++ tutorials/clinical/30_ecog.py | 2 ++ tutorials/clinical/60_sleep.py | 2 ++ tutorials/epochs/10_epochs_overview.py | 2 ++ tutorials/epochs/15_baseline_regression.py | 2 ++ tutorials/epochs/20_visualize_epochs.py | 2 ++ tutorials/epochs/30_epochs_metadata.py | 2 ++ tutorials/epochs/40_autogenerate_metadata.py | 2 ++ tutorials/epochs/50_epochs_to_data_frame.py | 2 ++ tutorials/epochs/60_make_fixed_length_epochs.py | 2 ++ tutorials/evoked/10_evoked_overview.py | 2 ++ tutorials/evoked/20_visualize_evoked.py | 2 ++ tutorials/evoked/30_eeg_erp.py | 2 ++ tutorials/evoked/40_whitened.py | 2 ++ tutorials/forward/10_background_freesurfer.py | 2 ++ tutorials/forward/20_source_alignment.py | 2 ++ tutorials/forward/25_automated_coreg.py | 2 ++ tutorials/forward/30_forward.py | 2 ++ tutorials/forward/35_eeg_no_mri.py | 2 ++ tutorials/forward/50_background_freesurfer_mne.py | 2 ++ tutorials/forward/80_fix_bem_in_blender.py | 2 ++ tutorials/forward/90_compute_covariance.py | 2 ++ 
tutorials/intro/10_overview.py | 2 ++ tutorials/intro/15_inplace.py | 2 ++ tutorials/intro/20_events_from_raw.py | 2 ++ tutorials/intro/30_info.py | 2 ++ tutorials/intro/40_sensor_locations.py | 2 ++ tutorials/intro/50_configure_mne.py | 2 ++ tutorials/intro/70_report.py | 2 ++ tutorials/inverse/10_stc_class.py | 2 ++ tutorials/inverse/20_dipole_fit.py | 2 ++ tutorials/inverse/30_mne_dspm_loreta.py | 2 ++ tutorials/inverse/35_dipole_orientations.py | 2 ++ tutorials/inverse/40_mne_fixed_free.py | 2 ++ tutorials/inverse/50_beamformer_lcmv.py | 2 ++ tutorials/inverse/60_visualize_stc.py | 2 ++ tutorials/inverse/70_eeg_mri_coords.py | 2 ++ tutorials/inverse/80_brainstorm_phantom_elekta.py | 2 ++ tutorials/inverse/85_brainstorm_phantom_ctf.py | 2 ++ tutorials/inverse/90_phantom_4DBTi.py | 2 ++ tutorials/inverse/95_phantom_KIT.py | 2 ++ tutorials/io/10_reading_meg_data.py | 2 ++ tutorials/io/20_reading_eeg_data.py | 2 ++ tutorials/io/30_reading_fnirs_data.py | 2 ++ tutorials/io/60_ctf_bst_auditory.py | 2 ++ tutorials/io/70_reading_eyetracking_data.py | 2 ++ tutorials/machine-learning/30_strf.py | 2 ++ tutorials/machine-learning/50_decoding.py | 2 ++ tutorials/preprocessing/10_preprocessing_overview.py | 2 ++ tutorials/preprocessing/14_quality_control_report.py | 2 ++ tutorials/preprocessing/15_handling_bad_channels.py | 2 ++ tutorials/preprocessing/20_rejecting_bad_data.py | 2 ++ tutorials/preprocessing/25_background_filtering.py | 2 ++ tutorials/preprocessing/30_filtering_resampling.py | 2 ++ .../preprocessing/35_artifact_correction_regression.py | 2 ++ tutorials/preprocessing/40_artifact_correction_ica.py | 2 ++ tutorials/preprocessing/45_projectors_background.py | 2 ++ tutorials/preprocessing/50_artifact_correction_ssp.py | 2 ++ tutorials/preprocessing/55_setting_eeg_reference.py | 2 ++ tutorials/preprocessing/59_head_positions.py | 2 ++ tutorials/preprocessing/60_maxwell_filtering_sss.py | 2 ++ tutorials/preprocessing/70_fnirs_processing.py | 2 ++ 
tutorials/preprocessing/80_opm_processing.py | 2 ++ tutorials/preprocessing/90_eyetracking_data.py | 2 ++ tutorials/raw/10_raw_overview.py | 2 ++ tutorials/raw/20_event_arrays.py | 2 ++ tutorials/raw/30_annotate_raw.py | 2 ++ tutorials/raw/40_visualize_raw.py | 2 ++ tutorials/simulation/10_array_objs.py | 2 ++ tutorials/simulation/70_point_spread.py | 2 ++ tutorials/simulation/80_dics.py | 2 ++ tutorials/stats-sensor-space/10_background_stats.py | 2 ++ tutorials/stats-sensor-space/20_erp_stats.py | 2 ++ .../stats-sensor-space/40_cluster_1samp_time_freq.py | 2 ++ .../stats-sensor-space/50_cluster_between_time_freq.py | 2 ++ .../stats-sensor-space/70_cluster_rmANOVA_time_freq.py | 2 ++ .../75_cluster_ftest_spatiotemporal.py | 2 ++ .../20_cluster_1samp_spatiotemporal.py | 2 ++ .../30_cluster_ftest_spatiotemporal.py | 2 ++ .../60_cluster_rmANOVA_spatiotemporal.py | 2 ++ tutorials/time-freq/10_spectrum_class.py | 2 ++ tutorials/time-freq/20_sensors_time_frequency.py | 2 ++ tutorials/time-freq/50_ssvep.py | 2 ++ tutorials/visualization/10_publication_figure.py | 2 ++ tutorials/visualization/20_ui_events.py | 2 ++ 232 files changed, 471 insertions(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index 0d9fe79346e..c6bf98814d9 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -116,6 +116,7 @@ "sphinx_copybutton", "sphinx_design", "sphinx_gallery.gen_gallery", + "sphinx_tags", "sphinxcontrib.bibtex", "sphinxcontrib.youtube", "sphinxcontrib.towncrier.ext", @@ -169,6 +170,14 @@ towncrier_draft_working_directory = str(curpath.parent) +# -- sphinx-tags configuration ------------------------------------------------ + +tags_create_tags = True +tags_output_dir = "_tags" +tags_page_title = "Tag" +tags_overview_title = "All Tags" +tags_extension = ["rst"] + # -- Intersphinx configuration ----------------------------------------------- intersphinx_mapping = { diff --git a/doc/development/contributing.rst b/doc/development/contributing.rst index beed58f6558..83baee4b431 100644 --- 
a/doc/development/contributing.rst +++ b/doc/development/contributing.rst @@ -3,6 +3,8 @@ Contributing guide ================== +.. tags:: development, contributing, governance + .. highlight:: console Thanks for taking the time to contribute! MNE-Python is an open-source project diff --git a/doc/development/index.rst b/doc/development/index.rst index 98fc28f8e7f..c1a9a91b8e1 100644 --- a/doc/development/index.rst +++ b/doc/development/index.rst @@ -1,6 +1,8 @@ MNE-Python Development ====================== +.. tags:: development, contributing, governance + .. NOTE: this first section (up until "overview of contribution process") is basically a copy/paste of CONTRIBUTING.md from the repository root, with one sentence deleted to avoid self-referential linking. Changes made here should diff --git a/doc/development/roadmap.rst b/doc/development/roadmap.rst index bab992d89d4..5eb45c61973 100644 --- a/doc/development/roadmap.rst +++ b/doc/development/roadmap.rst @@ -1,6 +1,8 @@ Roadmap ======= +.. tags:: development, roadmap, planning + This page describes some of the major medium- to long-term goals for MNE-Python. These are goals that require substantial effort and/or API design considerations. Some of these may be suitable for Google Summer of diff --git a/doc/development/whats_new.rst b/doc/development/whats_new.rst index 79bf102a2b3..10d4f810209 100644 --- a/doc/development/whats_new.rst +++ b/doc/development/whats_new.rst @@ -3,6 +3,8 @@ What's new ========== +.. tags:: development, changelog, releases + Changes for each version of MNE-Python are listed below. .. toctree:: diff --git a/doc/documentation/cite.rst b/doc/documentation/cite.rst index 5d67fec3ea3..edc329a58e1 100644 --- a/doc/documentation/cite.rst +++ b/doc/documentation/cite.rst @@ -3,6 +3,8 @@ How to cite MNE-Python ====================== +.. 
tags:: documentation, citation, publication + Citing the software ------------------- diff --git a/doc/documentation/cited.rst b/doc/documentation/cited.rst index b7f9821b8d3..dbf566a75ae 100644 --- a/doc/documentation/cited.rst +++ b/doc/documentation/cited.rst @@ -3,6 +3,8 @@ Papers citing MNE-Python ======================== +.. tags:: documentation, citation, publication + Estimates provided by Google Scholar as of 16 December 2024: - `MNE (2,190) `_ diff --git a/doc/documentation/cookbook.rst b/doc/documentation/cookbook.rst index d5eb149002b..dd586d7ba47 100644 --- a/doc/documentation/cookbook.rst +++ b/doc/documentation/cookbook.rst @@ -4,6 +4,8 @@ The typical M/EEG workflow ========================== +.. tags:: documentation, workflow, preprocessing, source-localization + Overview ======== diff --git a/doc/documentation/datasets.rst b/doc/documentation/datasets.rst index 2ec98664e74..81111f81034 100644 --- a/doc/documentation/datasets.rst +++ b/doc/documentation/datasets.rst @@ -3,6 +3,8 @@ Datasets Overview ################# +.. tags:: documentation, datasets, examples + .. note:: Contributing datasets to MNE-Python :class: sidebar diff --git a/doc/documentation/design_philosophy.rst b/doc/documentation/design_philosophy.rst index 5bdec09b4fa..5136caa37bc 100644 --- a/doc/documentation/design_philosophy.rst +++ b/doc/documentation/design_philosophy.rst @@ -3,6 +3,8 @@ Design philosophy ================= +.. tags:: documentation, design, workflow + Interactive versus scripted analysis ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/documentation/glossary.rst b/doc/documentation/glossary.rst index 89a5c477a75..4578f6b1ddb 100644 --- a/doc/documentation/glossary.rst +++ b/doc/documentation/glossary.rst @@ -1,6 +1,8 @@ Glossary ======== +.. tags:: documentation, glossary, terminology + The Glossary provides short definitions of vocabulary specific to MNE-Python and general neuroimaging concepts. 
If you think a term is missing, please consider `creating a new issue`_ or `opening a pull request`_ to add it. diff --git a/doc/documentation/implementation.rst b/doc/documentation/implementation.rst index 49fe31bac9c..8f270fc5fb0 100644 --- a/doc/documentation/implementation.rst +++ b/doc/documentation/implementation.rst @@ -3,6 +3,8 @@ Algorithms and other implementation details =========================================== +.. tags:: documentation, implementation, internals + This page describes some of the technical details of MNE-Python implementation. .. _units: diff --git a/doc/documentation/index.rst b/doc/documentation/index.rst index 764fcd08188..2431fc63ff5 100644 --- a/doc/documentation/index.rst +++ b/doc/documentation/index.rst @@ -3,6 +3,8 @@ Documentation overview ====================== +.. tags:: documentation, tutorials, examples, api-reference + .. note:: If you haven't already installed MNE-Python, please take a look diff --git a/doc/help/faq.rst b/doc/help/faq.rst index 7720885d643..56b94abc0fc 100644 --- a/doc/help/faq.rst +++ b/doc/help/faq.rst @@ -4,6 +4,8 @@ Frequently Asked Questions (FAQ) ================================ +.. tags:: help, faq, troubleshooting + .. highlight:: python General MNE-Python issues @@ -29,7 +31,7 @@ If PyVista plotting in Jupyter Notebooks doesn't work well, using the IPython magic ``%gui qt`` should `help `_. -.. code-block:: ipython +.. code-block:: pycon %gui qt diff --git a/doc/help/index.rst b/doc/help/index.rst index ceaf7a10c25..fffed4b5817 100644 --- a/doc/help/index.rst +++ b/doc/help/index.rst @@ -3,6 +3,8 @@ Getting help ^^^^^^^^^^^^ +.. tags:: help, faq, troubleshooting, community + There are several places to obtain help with MNE software tools. 
- The `MNE Forum`_ is a good placed to go for both troubleshooting and general diff --git a/doc/help/learn_python.rst b/doc/help/learn_python.rst index 25d42b3847f..af3e46ee9c3 100644 --- a/doc/help/learn_python.rst +++ b/doc/help/learn_python.rst @@ -3,6 +3,8 @@ Getting started with Python =========================== +.. tags:: help, beginners, python, learning + `Python`_ is a modern general-purpose object-oriented high-level programming language. There are many general introductions to Python online; here are a few: diff --git a/doc/help/migrating.rst b/doc/help/migrating.rst index 9aeb945c418..25764531449 100644 --- a/doc/help/migrating.rst +++ b/doc/help/migrating.rst @@ -3,6 +3,8 @@ Migrating from other analysis software ====================================== +.. tags:: help, migrating, interoperability + Here we offer some tips on how to migrate from other analysis software. EEGLAB diff --git a/doc/index.rst b/doc/index.rst index 7629f3d037b..83a1b412830 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -39,3 +39,4 @@ MNE-Python Homepage API Reference Get Help Development + Tags <_tags/tagsindex> diff --git a/doc/install/advanced.rst b/doc/install/advanced.rst index f1e217403aa..0def9555532 100644 --- a/doc/install/advanced.rst +++ b/doc/install/advanced.rst @@ -3,6 +3,8 @@ Advanced setup ============== +.. tags:: installation, advanced, jupyter, troubleshooting + Working with Jupyter Notebooks and JupyterLab ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/install/check_installation.rst b/doc/install/check_installation.rst index 4a7592a66e8..3eee4852060 100644 --- a/doc/install/check_installation.rst +++ b/doc/install/check_installation.rst @@ -4,6 +4,8 @@ Testing your installation ========================= +.. 
tags:: installation, setup, troubleshooting + To make sure MNE-Python was installed correctly, type the following command in a terminal:: diff --git a/doc/install/freesurfer.rst b/doc/install/freesurfer.rst index 1de6fc82c55..b547a083719 100644 --- a/doc/install/freesurfer.rst +++ b/doc/install/freesurfer.rst @@ -1,6 +1,8 @@ Installing FreeSurfer ===================== +.. tags:: installation, freesurfer, source-localization, bem + `FreeSurfer `_ is software for analysis and visualization of MRI data. In the MNE ecosystem, freesurfer is used to convert structural MRI scans into models of the scalp, inner/outer skull, and cortical surfaces, which are used diff --git a/doc/install/ides.rst b/doc/install/ides.rst index ff5d28ff381..08e6da9dd01 100644 --- a/doc/install/ides.rst +++ b/doc/install/ides.rst @@ -3,6 +3,8 @@ IDE integration (VSCode, Spyder, etc.) ====================================== +.. tags:: installation, setup, ide, vscode + Most users find it convenient to write and run their code in an `Integrated Development Environment`_ (IDE). Some popular choices for scientific Python development are: diff --git a/doc/install/index.rst b/doc/install/index.rst index 1adbee4df92..e92e1e12c55 100644 --- a/doc/install/index.rst +++ b/doc/install/index.rst @@ -6,6 +6,8 @@ Installing MNE-Python ====================== +.. tags:: installation, setup, beginners, advanced + .. when https://github.com/executablebooks/sphinx-design/issues/66 is fixed, prepend |cloud-arrow-down| |ensp| to the "Download installers" button text diff --git a/doc/install/installers.rst b/doc/install/installers.rst index 6a132979732..5caede99938 100644 --- a/doc/install/installers.rst +++ b/doc/install/installers.rst @@ -3,6 +3,8 @@ MNE-Python installers ===================== +.. tags:: installation, setup, beginners + MNE-Python installers are the easiest way to install MNE-Python and all dependencies. They also provide many additional Python packages and tools. Got any questions? 
Let us know on the `MNE Forum`_! diff --git a/doc/install/manual_install.rst b/doc/install/manual_install.rst index 5a67a3aea68..e9beafcd4bd 100644 --- a/doc/install/manual_install.rst +++ b/doc/install/manual_install.rst @@ -4,6 +4,8 @@ Install via :code:`pip` or :code:`conda` ======================================== +.. tags:: installation, setup, pip, conda + .. hint:: If you're unfamiliar with Python, we recommend using our :ref:`installers` instead. diff --git a/doc/install/manual_install_python.rst b/doc/install/manual_install_python.rst index 623b27d2a12..3af690a5f0a 100644 --- a/doc/install/manual_install_python.rst +++ b/doc/install/manual_install_python.rst @@ -5,6 +5,8 @@ Installing Python ================= +.. tags:: installation, setup, python, beginners + MNE-Python requires Python and several Python packages. MNE-Python version |version| requires Python version |min_python_version| or higher. diff --git a/doc/install/mne_c.rst b/doc/install/mne_c.rst index 10985a0c392..34de7641a5c 100644 --- a/doc/install/mne_c.rst +++ b/doc/install/mne_c.rst @@ -5,6 +5,8 @@ Installing MNE-C ================ +.. tags:: installation, mne-c, legacy-tools + System requirements ^^^^^^^^^^^^^^^^^^^ diff --git a/doc/install/mne_tools_suite.rst b/doc/install/mne_tools_suite.rst index 0a37f666634..c10a70e9431 100644 --- a/doc/install/mne_tools_suite.rst +++ b/doc/install/mne_tools_suite.rst @@ -1,6 +1,8 @@ Overview of the MNE tools suite =============================== +.. tags:: installation, interoperability, mne-c, mne-python + MNE-Python is an open-source Python module for processing, analysis, and visualization of functional neuroimaging data (EEG, MEG, sEEG, ECoG, and fNIRS). There are several related or interoperable software packages that you diff --git a/doc/install/updating.rst b/doc/install/updating.rst index c946d5e496e..ac6661eb796 100644 --- a/doc/install/updating.rst +++ b/doc/install/updating.rst @@ -1,6 +1,8 @@ Updating MNE-Python =================== +.. 
tags:: installation, updating, pip, conda + If you want to update MNE-Python to a newer version, there are a few different options, depending on how you originally installed it. diff --git a/doc/overview/people.rst b/doc/overview/people.rst index d3d5899ffb6..8f713ad64ee 100644 --- a/doc/overview/people.rst +++ b/doc/overview/people.rst @@ -5,6 +5,8 @@ Current Project Leadership and Institutional Partners ===================================================== +.. tags:: development, governance, community + .. _maintainer-team-people: Maintainer Team diff --git a/examples/datasets/brainstorm_data.py b/examples/datasets/brainstorm_data.py index ab5499fea71..3e9df575635 100644 --- a/examples/datasets/brainstorm_data.py +++ b/examples/datasets/brainstorm_data.py @@ -5,6 +5,8 @@ Brainstorm raw (median nerve) dataset ===================================== +.. tags:: examples, datasets, meg + Here we compute the evoked from raw for the Brainstorm tutorial dataset. For comparison, see :footcite:`TadelEtAl2011` and https://neuroimage.usc.edu/brainstorm/Tutorials/MedianNerveCtf. diff --git a/examples/datasets/hf_sef_data.py b/examples/datasets/hf_sef_data.py index 44aa6e8f9a4..135ddf23feb 100644 --- a/examples/datasets/hf_sef_data.py +++ b/examples/datasets/hf_sef_data.py @@ -5,6 +5,8 @@ HF-SEF dataset ============== +.. tags:: examples, datasets, meg, evoked + This example looks at high-frequency SEF responses. """ # Author: Jussi Nurminen (jnu@iki.fi) diff --git a/examples/datasets/kernel_phantom.py b/examples/datasets/kernel_phantom.py index da17f708454..f96b1a5978e 100644 --- a/examples/datasets/kernel_phantom.py +++ b/examples/datasets/kernel_phantom.py @@ -4,6 +4,8 @@ Kernel OPM phantom data ======================= +.. tags:: examples, datasets, meg, evoked, inverse + In this dataset, a Neuromag phantom was placed inside the Kernel OPM helmet and stimulated with 7 modules active (121 channels). Here we show some example traces. 
""" diff --git a/examples/datasets/limo_data.py b/examples/datasets/limo_data.py index 614a13309b9..962db39237b 100644 --- a/examples/datasets/limo_data.py +++ b/examples/datasets/limo_data.py @@ -5,6 +5,8 @@ Single trial linear regression analysis with the LIMO dataset ============================================================= +.. tags:: examples, datasets, eeg, statistics, epochs + Here we explore the structure of the data contained in the `LIMO dataset`_. This example replicates and extends some of the main analysis diff --git a/examples/datasets/opm_data.py b/examples/datasets/opm_data.py index 8f68e29d318..fe05bf21070 100644 --- a/examples/datasets/opm_data.py +++ b/examples/datasets/opm_data.py @@ -4,6 +4,8 @@ Optically pumped magnetometer (OPM) data ======================================== +.. tags:: examples, datasets, meg, source-localization + In this dataset, electrical median nerve stimulation was delivered to the left wrist of the subject. Somatosensory evoked fields were measured using nine QuSpin SERF OPMs placed over the right-hand side somatomotor area. Here diff --git a/examples/datasets/spm_faces_dataset.py b/examples/datasets/spm_faces_dataset.py index 32df7d1a9ed..d28a9428323 100644 --- a/examples/datasets/spm_faces_dataset.py +++ b/examples/datasets/spm_faces_dataset.py @@ -5,6 +5,8 @@ From raw data to dSPM on SPM Faces dataset ========================================== +.. tags:: examples, datasets, preprocessing, ica, inverse, source-localization + Runs a full pipeline using MNE-Python. This example does quite a bit of processing, so even on a fast machine it can take several minutes to complete. 
""" diff --git a/examples/decoding/decoding_csp_eeg.py b/examples/decoding/decoding_csp_eeg.py index 758c674e16e..1a92460b680 100644 --- a/examples/decoding/decoding_csp_eeg.py +++ b/examples/decoding/decoding_csp_eeg.py @@ -5,6 +5,8 @@ Motor imagery decoding from EEG data using the Common Spatial Pattern (CSP) =========================================================================== +.. tags:: examples, decoding, eeg, csp + Decoding of motor imagery applied to EEG data decomposed using CSP. A classifier is then applied to features extracted on CSP-filtered signals. diff --git a/examples/decoding/decoding_csp_timefreq.py b/examples/decoding/decoding_csp_timefreq.py index 9c26bf05444..01adb1f794e 100644 --- a/examples/decoding/decoding_csp_timefreq.py +++ b/examples/decoding/decoding_csp_timefreq.py @@ -5,6 +5,8 @@ Decoding in time-frequency space using Common Spatial Patterns (CSP) ==================================================================== +.. tags:: examples, decoding, time-frequency, classification + The time-frequency decomposition is estimated by iterating over raw data that has been band-passed at different frequencies. This is used to compute a covariance matrix over each epoch or a rolling time-window and extract the CSP diff --git a/examples/decoding/decoding_rsa_sgskip.py b/examples/decoding/decoding_rsa_sgskip.py index 1daaabb9619..47cfa4fa9d6 100644 --- a/examples/decoding/decoding_rsa_sgskip.py +++ b/examples/decoding/decoding_rsa_sgskip.py @@ -5,6 +5,8 @@ Representational Similarity Analysis ==================================== +.. tags:: examples, decoding, statistics, classification + Representational Similarity Analysis is used to perform summary statistics on supervised classifications where the number of classes is relatively high. 
It consists in characterizing the structure of the confusion matrix to infer diff --git a/examples/decoding/decoding_spatio_temporal_source.py b/examples/decoding/decoding_spatio_temporal_source.py index f724ea97b3b..0b1fdd3b8b1 100644 --- a/examples/decoding/decoding_spatio_temporal_source.py +++ b/examples/decoding/decoding_spatio_temporal_source.py @@ -5,6 +5,8 @@ Decoding source space data ========================== +.. tags:: examples, decoding, meg, source-localization, classification + Decoding to MEG data in source space on the left cortical surface. Here univariate feature selection is employed for speed purposes to confine the classification to a small number of potentially relevant features. The diff --git a/examples/decoding/decoding_spoc_CMC.py b/examples/decoding/decoding_spoc_CMC.py index 3accd5b2cd6..4b0d1998a5c 100644 --- a/examples/decoding/decoding_spoc_CMC.py +++ b/examples/decoding/decoding_spoc_CMC.py @@ -5,6 +5,8 @@ Continuous Target Decoding with SPoC ==================================== +.. tags:: examples, decoding, meg, connectivity, classification + Source Power Comodulation (SPoC) :footcite:`DahneEtAl2014` allows to identify the composition of orthogonal spatial filters that maximally correlate with a continuous target. diff --git a/examples/decoding/decoding_time_generalization_conditions.py b/examples/decoding/decoding_time_generalization_conditions.py index cc9e62f06cf..87a6ab4fafe 100644 --- a/examples/decoding/decoding_time_generalization_conditions.py +++ b/examples/decoding/decoding_time_generalization_conditions.py @@ -5,6 +5,8 @@ Decoding sensor space data with generalization across time and conditions ========================================================================= +.. tags:: examples, decoding, meg, epochs, classification + This example runs the analysis described in :footcite:`KingDehaene2014`. 
It illustrates how one can fit a linear classifier to identify a discriminatory topography at a given time instant and subsequently assess whether this linear diff --git a/examples/decoding/decoding_unsupervised_spatial_filter.py b/examples/decoding/decoding_unsupervised_spatial_filter.py index 2fb1a8fec46..441fbb63243 100644 --- a/examples/decoding/decoding_unsupervised_spatial_filter.py +++ b/examples/decoding/decoding_unsupervised_spatial_filter.py @@ -5,6 +5,8 @@ Analysis of evoked response using ICA and PCA reduction techniques ================================================================== +.. tags:: examples, decoding, ica, evoked, epochs + This example computes PCA and ICA of evoked or epochs data. Then the PCA / ICA components, a.k.a. spatial filters, are used to transform the channel data to new sources / virtual channels. The output is diff --git a/examples/decoding/decoding_xdawn_eeg.py b/examples/decoding/decoding_xdawn_eeg.py index a7d70bcb5bb..922c73c17b3 100644 --- a/examples/decoding/decoding_xdawn_eeg.py +++ b/examples/decoding/decoding_xdawn_eeg.py @@ -5,6 +5,8 @@ XDAWN Decoding From EEG data ============================ +.. tags:: examples, decoding, eeg, classification, evoked + ERP decoding with Xdawn :footcite:`RivetEtAl2009,RivetEtAl2011`. For each event type, a set of spatial Xdawn filters are trained and applied on the signal. Channels are concatenated and rescaled to create features vectors that will be diff --git a/examples/decoding/ems_filtering.py b/examples/decoding/ems_filtering.py index d1e04e2e096..1197d296d40 100644 --- a/examples/decoding/ems_filtering.py +++ b/examples/decoding/ems_filtering.py @@ -5,6 +5,8 @@ Compute effect-matched-spatial filtering (EMS) ============================================== +.. tags:: examples, decoding, evoked, epochs, visualization + This example computes the EMS to reconstruct the time course of the experimental effect as described in :footcite:`SchurgerEtAl2013`. 
diff --git a/examples/decoding/linear_model_patterns.py b/examples/decoding/linear_model_patterns.py index 48d679ed1fd..40f99bb60b2 100644 --- a/examples/decoding/linear_model_patterns.py +++ b/examples/decoding/linear_model_patterns.py @@ -5,6 +5,8 @@ Linear classifier on sensor data with plot patterns and filters =============================================================== +.. tags:: examples, decoding, meg, eeg, machine-learning, classification + Here decoding, a.k.a MVPA or supervised machine learning, is applied to M/EEG data in sensor space. Fit a linear classifier with the LinearModel object providing topographical patterns which are more neurophysiologically diff --git a/examples/decoding/receptive_field_mtrf.py b/examples/decoding/receptive_field_mtrf.py index 89a97956559..77da1da3ca5 100644 --- a/examples/decoding/receptive_field_mtrf.py +++ b/examples/decoding/receptive_field_mtrf.py @@ -5,6 +5,8 @@ Receptive Field Estimation and Prediction ========================================= +.. tags:: examples, decoding, eeg, machine-learning, regression + This example reproduces figures from Lalor et al.'s mTRF toolbox in MATLAB :footcite:`CrosseEtAl2016`. We will show how the :class:`mne.decoding.ReceptiveField` class diff --git a/examples/decoding/ssd_spatial_filters.py b/examples/decoding/ssd_spatial_filters.py index 7938fe6ad2a..8c39d20655d 100644 --- a/examples/decoding/ssd_spatial_filters.py +++ b/examples/decoding/ssd_spatial_filters.py @@ -5,6 +5,8 @@ Compute spatial filters with Spatio-Spectral Decomposition (SSD) ================================================================ +.. tags:: examples, decoding, time-frequency, preprocessing + In this example, we will compute spatial filters for retaining oscillatory brain activity and down-weighting 1/f background signals as proposed by :footcite:`NikulinEtAl2011`. 
diff --git a/examples/forward/forward_sensitivity_maps.py b/examples/forward/forward_sensitivity_maps.py index c9163b5b792..269aae33eb6 100644 --- a/examples/forward/forward_sensitivity_maps.py +++ b/examples/forward/forward_sensitivity_maps.py @@ -5,6 +5,8 @@ Display sensitivity maps for EEG and MEG sensors ================================================ +.. tags:: examples, forward-model, eeg, meg + Sensitivity maps can be produced from forward operators that indicate how well different sensor types will be able to detect neural currents from different regions of the brain. diff --git a/examples/forward/left_cerebellum_volume_source.py b/examples/forward/left_cerebellum_volume_source.py index ff810493e99..a34174277f0 100644 --- a/examples/forward/left_cerebellum_volume_source.py +++ b/examples/forward/left_cerebellum_volume_source.py @@ -5,6 +5,8 @@ Generate a left cerebellum volume source space ============================================== +.. tags:: examples, forward-model, source-localization, freesurfer, visualization + Generate a volume source space of the left cerebellum and plot its vertices relative to the left cortical surface source space and the FreeSurfer segmentation file. diff --git a/examples/forward/source_space_morphing.py b/examples/forward/source_space_morphing.py index fd5b992696e..bc32058ed30 100644 --- a/examples/forward/source_space_morphing.py +++ b/examples/forward/source_space_morphing.py @@ -5,6 +5,8 @@ Use source space morphing ========================= +.. tags:: examples, forward-model, source-localization, visualization + This example shows how to use source space morphing (as opposed to :class:`~mne.SourceEstimate` morphing) to create data that can be compared between subjects. 
diff --git a/examples/inverse/compute_mne_inverse_epochs_in_label.py b/examples/inverse/compute_mne_inverse_epochs_in_label.py index ca15c80efcc..c30504a0d61 100644 --- a/examples/inverse/compute_mne_inverse_epochs_in_label.py +++ b/examples/inverse/compute_mne_inverse_epochs_in_label.py @@ -5,6 +5,8 @@ Compute MNE-dSPM inverse solution on single epochs ================================================== +.. tags:: examples, inverse, source-localization, epochs, evoked + Compute dSPM inverse solution on single trial epochs restricted to a brain label. """ diff --git a/examples/inverse/compute_mne_inverse_raw_in_label.py b/examples/inverse/compute_mne_inverse_raw_in_label.py index b462c09e180..d60ecb24494 100644 --- a/examples/inverse/compute_mne_inverse_raw_in_label.py +++ b/examples/inverse/compute_mne_inverse_raw_in_label.py @@ -5,6 +5,8 @@ Compute sLORETA inverse solution on raw data ============================================= +.. tags:: examples, inverse, source-localization, raw, visualization + Compute sLORETA inverse solution on raw dataset restricted to a brain label and stores the solution in stc files for visualisation. diff --git a/examples/inverse/compute_mne_inverse_volume.py b/examples/inverse/compute_mne_inverse_volume.py index b4889df67a6..3c31e7cd0a1 100644 --- a/examples/inverse/compute_mne_inverse_volume.py +++ b/examples/inverse/compute_mne_inverse_volume.py @@ -5,6 +5,8 @@ Compute MNE-dSPM inverse solution on evoked data in volume source space ======================================================================= +.. tags:: examples, inverse, source-localization, evoked, visualization + Compute dSPM inverse solution on MNE evoked dataset in a volume source space and stores the solution in a nifti file for visualisation. 
""" diff --git a/examples/inverse/custom_inverse_solver.py b/examples/inverse/custom_inverse_solver.py index e8444dcd046..f3c7efd233f 100644 --- a/examples/inverse/custom_inverse_solver.py +++ b/examples/inverse/custom_inverse_solver.py @@ -5,6 +5,8 @@ Source localization with a custom inverse solver ================================================ +.. tags:: examples, inverse, source-localization, evoked, forward-model + The objective of this example is to show how to plug a custom inverse solver in MNE in order to facilate empirical comparison with the methods MNE already implements (wMNE, dSPM, sLORETA, eLORETA, LCMV, DICS, (TF-)MxNE etc.). diff --git a/examples/inverse/dics_epochs.py b/examples/inverse/dics_epochs.py index 1900e3af34b..7285b0e2c4d 100644 --- a/examples/inverse/dics_epochs.py +++ b/examples/inverse/dics_epochs.py @@ -5,6 +5,8 @@ Compute source level time-frequency timecourses using a DICS beamformer ======================================================================= +.. tags:: examples, inverse, beamforming, time-frequency, source-localization + In this example, a Dynamic Imaging of Coherent Sources (DICS) :footcite:`GrossEtAl2001` beamformer is used to transform sensor-level time-frequency objects to the source level. We will look at the event-related synchronization (ERS) of beta band activity in the diff --git a/examples/inverse/dics_source_power.py b/examples/inverse/dics_source_power.py index 586044cdd9f..90145236404 100644 --- a/examples/inverse/dics_source_power.py +++ b/examples/inverse/dics_source_power.py @@ -5,6 +5,8 @@ Compute source power using DICS beamformer ========================================== +.. tags:: examples, beamforming, inverse, connectivity + Compute a Dynamic Imaging of Coherent Sources (DICS) :footcite:`GrossEtAl2001` filter from single-trial activity to estimate source power across a frequency band. 
This example demonstrates how to source localize the event-related diff --git a/examples/inverse/evoked_ers_source_power.py b/examples/inverse/evoked_ers_source_power.py index 7e12b3c8028..bceb5bcd6dd 100644 --- a/examples/inverse/evoked_ers_source_power.py +++ b/examples/inverse/evoked_ers_source_power.py @@ -5,6 +5,8 @@ Compute evoked ERS source power using DICS, LCMV beamformer, and dSPM ===================================================================== +.. tags:: examples, inverse, beamforming, source-localization, time-frequency + Here we examine 3 ways of localizing event-related synchronization (ERS) of beta band activity in this dataset: :ref:`somato-dataset` using :term:`DICS`, :term:`LCMV beamformer`, and :term:`dSPM` applied to active and diff --git a/examples/inverse/gamma_map_inverse.py b/examples/inverse/gamma_map_inverse.py index 1085a6bb749..2a05276962e 100644 --- a/examples/inverse/gamma_map_inverse.py +++ b/examples/inverse/gamma_map_inverse.py @@ -5,6 +5,8 @@ Compute a sparse inverse solution using the Gamma-MAP empirical Bayesian method =============================================================================== +.. tags:: examples, inverse, gamma-map, sparse, bayesian + See :footcite:`WipfNagarajan2009` for details. """ # Author: Martin Luessi diff --git a/examples/inverse/label_activation_from_stc.py b/examples/inverse/label_activation_from_stc.py index daaf4c4ae12..d50fa14e7bd 100644 --- a/examples/inverse/label_activation_from_stc.py +++ b/examples/inverse/label_activation_from_stc.py @@ -5,6 +5,8 @@ Extracting time course from source_estimate object ================================================== +.. 
tags:: examples, inverse, source-localization, visualization + Load a SourceEstimate object from stc files and extract the time course of activation in individual labels, as well as in a complex label diff --git a/examples/inverse/label_from_stc.py b/examples/inverse/label_from_stc.py index 76545d4895f..eb2c349a7db 100644 --- a/examples/inverse/label_from_stc.py +++ b/examples/inverse/label_from_stc.py @@ -5,6 +5,8 @@ Generate a functional label from source estimates ================================================= +.. tags:: examples, inverse, source-localization, evoked, visualization + Threshold source estimates and produce a functional label. The label is typically the region of interest that contains high values. Here we compare the average time course in the anatomical label obtained diff --git a/examples/inverse/label_source_activations.py b/examples/inverse/label_source_activations.py index 74d338486b0..71fa960b013 100644 --- a/examples/inverse/label_source_activations.py +++ b/examples/inverse/label_source_activations.py @@ -5,6 +5,8 @@ Extracting the time series of activations in a label ==================================================== +.. tags:: examples, inverse, source-localization, evoked, visualization + We first apply a dSPM inverse operator to get signed activations in a label (with positive and negative values) and we then compare different strategies to average the times series in a label. We compare a simple average, with an diff --git a/examples/inverse/mixed_norm_inverse.py b/examples/inverse/mixed_norm_inverse.py index 70764a53973..45e53a14dc7 100644 --- a/examples/inverse/mixed_norm_inverse.py +++ b/examples/inverse/mixed_norm_inverse.py @@ -5,6 +5,8 @@ Compute sparse inverse solution with mixed norm: MxNE and irMxNE ================================================================ +.. 
tags:: examples, inverse, source-localization, evoked, visualization + Runs an (ir)MxNE (L1/L2 :footcite:`GramfortEtAl2012` or L0.5/L2 :footcite:`StrohmeierEtAl2014` mixed norm) inverse solver. L0.5/L2 is done with irMxNE which allows for sparser source estimates with less diff --git a/examples/inverse/mixed_source_space_inverse.py b/examples/inverse/mixed_source_space_inverse.py index c4e5d29fbf3..18e7d790dd4 100644 --- a/examples/inverse/mixed_source_space_inverse.py +++ b/examples/inverse/mixed_source_space_inverse.py @@ -5,6 +5,8 @@ Compute MNE inverse solution on evoked data with a mixed source space ===================================================================== +.. tags:: examples, inverse, source-localization, forward-model, visualization + Create a mixed source space and compute an MNE inverse solution on an evoked dataset. """ # Author: Annalisa Pascarella diff --git a/examples/inverse/mne_cov_power.py b/examples/inverse/mne_cov_power.py index a6cf0df181f..d26c3e785a0 100644 --- a/examples/inverse/mne_cov_power.py +++ b/examples/inverse/mne_cov_power.py @@ -5,6 +5,8 @@ Compute source power estimate by projecting the covariance with MNE =================================================================== +.. tags:: examples, inverse, source-localization, epochs, visualization + We can apply the MNE inverse operator to a covariance matrix to obtain an estimate of source power. This is computationally more efficient than first estimating the source timecourses and then computing their power. This diff --git a/examples/inverse/morph_surface_stc.py b/examples/inverse/morph_surface_stc.py index abf84345e14..7e920842a33 100644 --- a/examples/inverse/morph_surface_stc.py +++ b/examples/inverse/morph_surface_stc.py @@ -5,6 +5,8 @@ Morph surface source estimate ============================= +.. 
tags:: examples, inverse, source-localization, freesurfer, visualization + This example demonstrates how to morph an individual subject's :class:`mne.SourceEstimate` to a common reference space. We achieve this using :class:`mne.SourceMorph`. Pre-computed data will be morphed based on diff --git a/examples/inverse/morph_volume_stc.py b/examples/inverse/morph_volume_stc.py index 67c7fa1152d..a7d0360dc19 100644 --- a/examples/inverse/morph_volume_stc.py +++ b/examples/inverse/morph_volume_stc.py @@ -5,6 +5,8 @@ Morph volumetric source estimate ================================ +.. tags:: examples, inverse, source-localization, freesurfer, visualization + This example demonstrates how to morph an individual subject's :class:`mne.VolSourceEstimate` to a common reference space. We achieve this using :class:`mne.SourceMorph`. Data will be morphed based on an affine transformation and diff --git a/examples/inverse/multi_dipole_model.py b/examples/inverse/multi_dipole_model.py index b6985bcc182..ef6442c6598 100644 --- a/examples/inverse/multi_dipole_model.py +++ b/examples/inverse/multi_dipole_model.py @@ -5,6 +5,8 @@ Computing source timecourses with an XFit-like multi-dipole model ================================================================= +.. tags:: examples, inverse, source-localization, evoked, epochs + MEGIN's XFit program offers a "guided ECD modeling" interface, where multiple dipoles can be fitted interactively. By manually selecting subsets of sensors and time ranges, dipoles can be fitted to specific signal components. Then, diff --git a/examples/inverse/multidict_reweighted_tfmxne.py b/examples/inverse/multidict_reweighted_tfmxne.py index aed27ea880b..cb262866ed6 100644 --- a/examples/inverse/multidict_reweighted_tfmxne.py +++ b/examples/inverse/multidict_reweighted_tfmxne.py @@ -5,6 +5,8 @@ Compute iterative reweighted TF-MxNE with multiscale time-frequency dictionary ============================================================================== +.. 
tags:: examples, inverse, source-localization, time-frequency, evoked + The iterative reweighted TF-MxNE solver is a distributed inverse method based on the TF-MxNE solver, which promotes focal (sparse) sources :footcite:`StrohmeierEtAl2015`. The benefits of this approach are that: diff --git a/examples/inverse/psf_ctf_label_leakage.py b/examples/inverse/psf_ctf_label_leakage.py index 4991ae6eaf3..57ca5046468 100644 --- a/examples/inverse/psf_ctf_label_leakage.py +++ b/examples/inverse/psf_ctf_label_leakage.py @@ -5,6 +5,8 @@ Visualize source leakage among labels using a circular graph ============================================================ +.. tags:: examples, inverse, source-localization, connectivity, visualization + This example computes all-to-all pairwise leakage among 68 regions in source space based on MNE inverse solutions and a FreeSurfer cortical parcellation. Label-to-label leakage is estimated as the correlation among the diff --git a/examples/inverse/psf_ctf_vertices.py b/examples/inverse/psf_ctf_vertices.py index e6ab88a5fef..3402fc6880c 100644 --- a/examples/inverse/psf_ctf_vertices.py +++ b/examples/inverse/psf_ctf_vertices.py @@ -5,6 +5,8 @@ Plot point-spread functions (PSFs) and cross-talk functions (CTFs) ================================================================== +.. tags:: examples, inverse, source-localization, forward-model, visualization + Visualise PSF and CTF at one vertex for sLORETA. """ # Authors: Olaf Hauk diff --git a/examples/inverse/psf_ctf_vertices_lcmv.py b/examples/inverse/psf_ctf_vertices_lcmv.py index bf7009374e0..83addd04170 100644 --- a/examples/inverse/psf_ctf_vertices_lcmv.py +++ b/examples/inverse/psf_ctf_vertices_lcmv.py @@ -5,6 +5,8 @@ Compute cross-talk functions for LCMV beamformers ================================================= +.. 
tags:: examples, inverse, beamforming, source-localization, visualization + Visualise cross-talk functions at one vertex for LCMV beamformers computed with different data covariance matrices, which affects their cross-talk functions. diff --git a/examples/inverse/psf_volume.py b/examples/inverse/psf_volume.py index 91af78b9dfc..0628cc9de8a 100644 --- a/examples/inverse/psf_volume.py +++ b/examples/inverse/psf_volume.py @@ -5,6 +5,8 @@ Plot point-spread functions (PSFs) for a volume =============================================== +.. tags:: examples, inverse, source-localization, forward-model, visualization + Visualise PSF at one volume vertex for sLORETA. """ # Authors: Olaf Hauk diff --git a/examples/inverse/rap_music.py b/examples/inverse/rap_music.py index cc386605dc1..613c505aa46 100644 --- a/examples/inverse/rap_music.py +++ b/examples/inverse/rap_music.py @@ -5,6 +5,8 @@ Compute Rap-Music on evoked data ================================ +.. tags:: examples, inverse, source-localization, evoked, visualization + Compute a Recursively Applied and Projected MUltiple Signal Classification (RAP-MUSIC) :footcite:`MosherLeahy1999` on evoked data. """ diff --git a/examples/inverse/read_inverse.py b/examples/inverse/read_inverse.py index 148d09d84af..40ef0031c23 100644 --- a/examples/inverse/read_inverse.py +++ b/examples/inverse/read_inverse.py @@ -5,6 +5,8 @@ Reading an inverse operator =========================== +.. tags:: examples, inverse, source-localization + The inverse operator's source space is shown in 3D. """ # Author: Alexandre Gramfort diff --git a/examples/inverse/read_stc.py b/examples/inverse/read_stc.py index b06f61d14f8..bb9f081e834 100644 --- a/examples/inverse/read_stc.py +++ b/examples/inverse/read_stc.py @@ -5,6 +5,8 @@ Reading an STC file =================== +.. tags:: examples, inverse, source-localization, io, visualization + STC files contain activations on cortex ie. 
source reconstructions """ diff --git a/examples/inverse/resolution_metrics.py b/examples/inverse/resolution_metrics.py index 5ab39d3c645..a35ad0ba202 100644 --- a/examples/inverse/resolution_metrics.py +++ b/examples/inverse/resolution_metrics.py @@ -5,6 +5,8 @@ Compute spatial resolution metrics in source space ================================================== +.. tags:: examples, inverse, source-localization, statistics, visualization + Compute peak localisation error and spatial deviation for the point-spread functions of dSPM and MNE. Plot their distributions and difference of distributions. This example mimics some results from :footcite:`HaukEtAl2019`, diff --git a/examples/inverse/resolution_metrics_eegmeg.py b/examples/inverse/resolution_metrics_eegmeg.py index 51acd51fd94..1ceccaf2319 100644 --- a/examples/inverse/resolution_metrics_eegmeg.py +++ b/examples/inverse/resolution_metrics_eegmeg.py @@ -5,6 +5,8 @@ Compute spatial resolution metrics to compare MEG with EEG+MEG ============================================================== +.. tags:: examples, inverse, source-localization, meg, eeg, visualization + Compute peak localisation error and spatial deviation for the point-spread functions of dSPM and MNE. Plot their distributions and difference of distributions. This example mimics some results from :footcite:`HaukEtAl2019`, diff --git a/examples/inverse/snr_estimate.py b/examples/inverse/snr_estimate.py index fda8ada5d0c..d79f94a61ca 100644 --- a/examples/inverse/snr_estimate.py +++ b/examples/inverse/snr_estimate.py @@ -5,6 +5,8 @@ Estimate data SNR using an inverse ================================== +.. tags:: examples, inverse, evoked, visualization + This estimates the SNR as a function of time for a set of data using a minimum-norm inverse operator. 
""" diff --git a/examples/inverse/source_space_snr.py b/examples/inverse/source_space_snr.py index 965c57d86ca..c850667e08b 100644 --- a/examples/inverse/source_space_snr.py +++ b/examples/inverse/source_space_snr.py @@ -5,6 +5,8 @@ Computing source space SNR ========================== +.. tags:: examples, inverse, source-localization, meg, eeg, visualization + This example shows how to compute and plot source space SNR as in :footcite:`GoldenholzEtAl2009`. """ diff --git a/examples/inverse/time_frequency_mixed_norm_inverse.py b/examples/inverse/time_frequency_mixed_norm_inverse.py index bdd1134f39a..34d65a49a84 100644 --- a/examples/inverse/time_frequency_mixed_norm_inverse.py +++ b/examples/inverse/time_frequency_mixed_norm_inverse.py @@ -5,6 +5,8 @@ Compute MxNE with time-frequency sparse prior ============================================= +.. tags:: examples, inverse, source-localization, time-frequency, evoked + The TF-MxNE solver is a distributed inverse method (like dSPM or sLORETA) that promotes focal (sparse) sources (such as dipole fitting techniques) :footcite:`GramfortEtAl2013b,GramfortEtAl2011`. diff --git a/examples/inverse/trap_music.py b/examples/inverse/trap_music.py index 08dee1e54a9..e2cde8be5ee 100644 --- a/examples/inverse/trap_music.py +++ b/examples/inverse/trap_music.py @@ -5,6 +5,8 @@ Compute Trap-Music on evoked data ================================= +.. tags:: examples, inverse, source-localization, evoked, visualization + Compute a Truncated Recursively Applied and Projected MUltiple Signal Classification (TRAP-MUSIC) :footcite:`Makela2018` on evoked data. """ diff --git a/examples/inverse/vector_mne_solution.py b/examples/inverse/vector_mne_solution.py index f6ae788c145..ee2d68c27d9 100644 --- a/examples/inverse/vector_mne_solution.py +++ b/examples/inverse/vector_mne_solution.py @@ -5,6 +5,8 @@ Plotting the full vector-valued MNE solution ============================================ +.. 
tags:: examples, inverse, source-localization, evoked, visualization + The source space that is used for the inverse computation defines a set of dipoles, distributed across the cortex. When visualizing a source estimate, it is sometimes useful to show the dipole directions in addition to their diff --git a/examples/io/elekta_epochs.py b/examples/io/elekta_epochs.py index 35922b69531..163e1620a98 100644 --- a/examples/io/elekta_epochs.py +++ b/examples/io/elekta_epochs.py @@ -5,6 +5,8 @@ Getting averaging info from .fif files ====================================== +.. tags:: examples, io, epochs, meg + Parse averaging information defined in Elekta Vectorview/TRIUX DACQ (data acquisition). Extract and average epochs accordingly. Modify some averaging parameters and get epochs. """ diff --git a/examples/io/read_impedances.py b/examples/io/read_impedances.py index e1b7061c4c9..961e45c2a09 100644 --- a/examples/io/read_impedances.py +++ b/examples/io/read_impedances.py @@ -5,6 +5,8 @@ Getting impedances from raw files ================================= +.. tags:: examples, io, eeg, raw + Many EEG systems provide impedance measurements for each channel within their file format. MNE does not parse this information and does not store it in the :class:`~mne.io.Raw` object. However, it is possible to extract this information from diff --git a/examples/io/read_neo_format.py b/examples/io/read_neo_format.py index a34ff0cd4f4..f0151d35c6c 100644 --- a/examples/io/read_neo_format.py +++ b/examples/io/read_neo_format.py @@ -5,6 +5,8 @@ How to use data in neural ensemble (NEO) format =============================================== +.. tags:: examples, io, raw, interoperability + This example shows how to create an MNE-Python `~mne.io.Raw` object from data in the `neural ensemble `_ format. 
For general information on creating MNE-Python's data objects from NumPy arrays, see diff --git a/examples/io/read_noise_covariance_matrix.py b/examples/io/read_noise_covariance_matrix.py index b8b1dc5832c..15eb8a2e160 100644 --- a/examples/io/read_noise_covariance_matrix.py +++ b/examples/io/read_noise_covariance_matrix.py @@ -5,6 +5,8 @@ Reading/Writing a noise covariance matrix ========================================= +.. tags:: examples, io, inverse, visualization + How to plot a noise covariance matrix. """ # Author: Alexandre Gramfort diff --git a/examples/io/read_xdf.py b/examples/io/read_xdf.py index ee8524702a5..cc4ebf3fb66 100644 --- a/examples/io/read_xdf.py +++ b/examples/io/read_xdf.py @@ -5,6 +5,8 @@ Reading XDF EEG data ==================== +.. tags:: examples, io, raw, eeg + Here we read some sample XDF data. Although we do not analyze it here, this recording is of a short parallel auditory response (pABR) experiment :footcite:`PolonenkoMaddox2019` and was provided by the `Maddox Lab `_. diff --git a/examples/preprocessing/contralateral_referencing.py b/examples/preprocessing/contralateral_referencing.py index b2ae199817d..d11935fca9b 100644 --- a/examples/preprocessing/contralateral_referencing.py +++ b/examples/preprocessing/contralateral_referencing.py @@ -5,6 +5,8 @@ Using contralateral referencing for EEG ======================================= +.. 
tags:: examples, preprocessing, eeg, raw + Instead of using a single reference electrode for all channels, some researchers reference the EEG electrodes in each hemisphere to an electrode in the contralateral hemisphere (often an electrode over the mastoid bone; this is diff --git a/examples/preprocessing/css.py b/examples/preprocessing/css.py index c6684b74273..7f87adce6cb 100644 --- a/examples/preprocessing/css.py +++ b/examples/preprocessing/css.py @@ -5,6 +5,8 @@ Cortical Signal Suppression (CSS) for removal of cortical signals ================================================================= +.. tags:: examples, preprocessing, artifacts, meg, eeg, simulation + This script shows an example of how to use CSS :footcite:`Samuelsson2019` . CSS suppresses the cortical contribution to the signal subspace in EEG data using MEG data, facilitating diff --git a/examples/preprocessing/define_target_events.py b/examples/preprocessing/define_target_events.py index f438d07c753..aec298f9823 100644 --- a/examples/preprocessing/define_target_events.py +++ b/examples/preprocessing/define_target_events.py @@ -5,6 +5,8 @@ Define target events based on time lag, plot evoked response ============================================================ +.. tags:: examples, preprocessing, events, epochs, evoked + This script shows how to define higher order events based on time lag between reference and target events. For illustration, we will put face stimuli presented into two diff --git a/examples/preprocessing/eeg_bridging.py b/examples/preprocessing/eeg_bridging.py index 37d85c55df6..0ee0603e637 100644 --- a/examples/preprocessing/eeg_bridging.py +++ b/examples/preprocessing/eeg_bridging.py @@ -5,6 +5,8 @@ Identify EEG Electrodes Bridged by too much Gel =============================================== +.. 
tags:: examples, preprocessing, eeg, artifacts + Research-grade EEG often uses a gel based system, and when too much gel is applied the gel conducting signal from the scalp to the electrode for one electrode connects with the gel conducting signal from another electrode diff --git a/examples/preprocessing/eeg_csd.py b/examples/preprocessing/eeg_csd.py index e5c6086c068..cf35d15832f 100644 --- a/examples/preprocessing/eeg_csd.py +++ b/examples/preprocessing/eeg_csd.py @@ -5,6 +5,8 @@ Transform EEG data using current source density (CSD) ===================================================== +.. tags:: examples, preprocessing, eeg, visualization + This script shows an example of how to use CSD :footcite:`PerrinEtAl1987,PerrinEtAl1989,Cohen2014,KayserTenke2015`. CSD takes the spatial Laplacian of the sensor signal (derivative in both diff --git a/examples/preprocessing/eog_artifact_histogram.py b/examples/preprocessing/eog_artifact_histogram.py index ac51d8b1f39..979704d7e8e 100644 --- a/examples/preprocessing/eog_artifact_histogram.py +++ b/examples/preprocessing/eog_artifact_histogram.py @@ -5,6 +5,8 @@ Show EOG artifact timing ======================== +.. tags:: examples, preprocessing, artifacts, epochs, visualization + Compute the distribution of timing for EOG artifacts. """ diff --git a/examples/preprocessing/eog_regression.py b/examples/preprocessing/eog_regression.py index e3b8341e744..dda6f3517cf 100644 --- a/examples/preprocessing/eog_regression.py +++ b/examples/preprocessing/eog_regression.py @@ -3,6 +3,8 @@ Reduce EOG artifacts through regression ======================================= +.. tags:: examples, preprocessing, artifacts, epochs, evoked + Reduce artifacts by regressing the EOG channels onto the rest of the channels and then subtracting the EOG signal. 
diff --git a/examples/preprocessing/epochs_metadata.py b/examples/preprocessing/epochs_metadata.py index 9c46368afa0..95a249c5403 100644 --- a/examples/preprocessing/epochs_metadata.py +++ b/examples/preprocessing/epochs_metadata.py @@ -5,6 +5,8 @@ Automated epochs metadata generation with variable time windows =============================================================== +.. tags:: examples, epochs, events, preprocessing, eeg + When working with :class:`~mne.Epochs`, :ref:`metadata ` can be invaluable. There is an extensive tutorial on :ref:`how it can be generated automatically `. diff --git a/examples/preprocessing/esg_rm_heart_artefact_pcaobs.py b/examples/preprocessing/esg_rm_heart_artefact_pcaobs.py index a6c6bb3c2ba..1915dc92a8f 100755 --- a/examples/preprocessing/esg_rm_heart_artefact_pcaobs.py +++ b/examples/preprocessing/esg_rm_heart_artefact_pcaobs.py @@ -5,6 +5,8 @@ Principal Component Analysis - Optimal Basis Sets (PCA-OBS) removing cardiac artefact ===================================================================================== +.. tags:: examples, preprocessing, artifacts, eeg + This script shows an example of how to use an adaptation of PCA-OBS :footcite:`NiazyEtAl2005`. PCA-OBS was originally designed to remove the ballistocardiographic artefact in simultaneous EEG-fMRI. Here, it diff --git a/examples/preprocessing/find_ref_artifacts.py b/examples/preprocessing/find_ref_artifacts.py index 90e3d1fb0da..8a5798f12ea 100644 --- a/examples/preprocessing/find_ref_artifacts.py +++ b/examples/preprocessing/find_ref_artifacts.py @@ -5,6 +5,8 @@ Find MEG reference channel artifacts ==================================== +.. tags:: examples, preprocessing, artifacts, ica, meg + Use ICA decompositions of MEG reference channels to remove intermittent noise. 
Many MEG systems have an array of reference channels which are used to detect diff --git a/examples/preprocessing/fnirs_artifact_removal.py b/examples/preprocessing/fnirs_artifact_removal.py index 7c4855086a7..58395ed1f90 100644 --- a/examples/preprocessing/fnirs_artifact_removal.py +++ b/examples/preprocessing/fnirs_artifact_removal.py @@ -5,6 +5,8 @@ Visualise NIRS artifact correction methods ========================================== +.. tags:: examples, preprocessing, fnirs, artifacts, visualization + Here we artificially introduce several fNIRS artifacts and observe how artifact correction techniques attempt to correct the data. diff --git a/examples/preprocessing/ica_comparison.py b/examples/preprocessing/ica_comparison.py index d4246b80362..ee78bcb6c66 100644 --- a/examples/preprocessing/ica_comparison.py +++ b/examples/preprocessing/ica_comparison.py @@ -5,6 +5,8 @@ Compare the different ICA algorithms in MNE =========================================== +.. tags:: examples, preprocessing, ica, meg, visualization + Different ICA algorithms are fit to raw MEG data, and the corresponding maps are displayed. diff --git a/examples/preprocessing/interpolate_bad_channels.py b/examples/preprocessing/interpolate_bad_channels.py index a56aec7d8f7..81ad1dff79f 100644 --- a/examples/preprocessing/interpolate_bad_channels.py +++ b/examples/preprocessing/interpolate_bad_channels.py @@ -5,6 +5,8 @@ Interpolate bad channels for MEG/EEG channels ============================================= +.. tags:: examples, preprocessing, meg, eeg, evoked + This example shows how to interpolate bad MEG/EEG channels - Using spherical splines from :footcite:`PerrinEtAl1989` for EEG data. 
diff --git a/examples/preprocessing/interpolate_to.py b/examples/preprocessing/interpolate_to.py index 5672dca3dd8..a5e12114673 100644 --- a/examples/preprocessing/interpolate_to.py +++ b/examples/preprocessing/interpolate_to.py @@ -5,6 +5,8 @@ Interpolate MEG or EEG data to any montage ====================================================== +.. tags:: examples, preprocessing, eeg, meg, evoked + This example demonstrates both EEG montage interpolation and MEG system transformation. diff --git a/examples/preprocessing/movement_compensation.py b/examples/preprocessing/movement_compensation.py index 4577a7e0a51..feed9fd8713 100644 --- a/examples/preprocessing/movement_compensation.py +++ b/examples/preprocessing/movement_compensation.py @@ -5,6 +5,8 @@ Maxwell filter data with movement compensation ============================================== +.. tags:: examples, preprocessing, meg, simulation, visualization + Demonstrate movement compensation on simulated data. The simulated data contains bilateral activation of auditory cortices, repeated over 14 different head rotations (head center held fixed). See the following for diff --git a/examples/preprocessing/movement_detection.py b/examples/preprocessing/movement_detection.py index dd468feb464..204f336142a 100644 --- a/examples/preprocessing/movement_detection.py +++ b/examples/preprocessing/movement_detection.py @@ -5,6 +5,8 @@ Annotate movement artifacts and reestimate dev_head_t ===================================================== +.. tags:: examples, preprocessing, artifacts, meg, visualization + Periods, where the participant moved considerably, are contaminated by low amplitude artifacts. When averaging the magnetic fields, the more spread the head position, the bigger the cancellation due to different locations. 
diff --git a/examples/preprocessing/muscle_detection.py b/examples/preprocessing/muscle_detection.py index 3e0e140c802..7334152ba94 100644 --- a/examples/preprocessing/muscle_detection.py +++ b/examples/preprocessing/muscle_detection.py @@ -5,6 +5,8 @@ Annotate muscle artifacts ========================= +.. tags:: examples, preprocessing, artifacts, meg, visualization + Muscle contractions produce high frequency activity that can mask brain signal of interest. Muscle artifacts can be produced when clenching the jaw, swallowing, or twitching a cranial muscle. Muscle artifacts are most diff --git a/examples/preprocessing/muscle_ica.py b/examples/preprocessing/muscle_ica.py index f61d1e22bc4..97be1ffa09d 100644 --- a/examples/preprocessing/muscle_ica.py +++ b/examples/preprocessing/muscle_ica.py @@ -5,6 +5,8 @@ Removing muscle ICA components ============================== +.. tags:: examples, preprocessing, ica, artifacts, eeg + Gross movements produce widespread high-frequency activity across all channels that is usually not recoverable and so the epoch must be rejected as shown in :ref:`ex-muscle-artifacts`. More ubiquitously than gross movements, muscle diff --git a/examples/preprocessing/otp.py b/examples/preprocessing/otp.py index a7d45d4cbc7..6bf592f075d 100644 --- a/examples/preprocessing/otp.py +++ b/examples/preprocessing/otp.py @@ -5,6 +5,8 @@ Plot sensor denoising using oversampled temporal projection =========================================================== +.. tags:: examples, preprocessing, artifacts, meg, visualization + This demonstrates denoising using the OTP algorithm :footcite:`LarsonTaulu2018` on data with with sensor artifacts (flux jumps) and random noise. 
""" diff --git a/examples/preprocessing/shift_evoked.py b/examples/preprocessing/shift_evoked.py index 0e8c52676fe..d7b44fd81ca 100644 --- a/examples/preprocessing/shift_evoked.py +++ b/examples/preprocessing/shift_evoked.py @@ -5,6 +5,8 @@ Shifting time-scale in evoked data ================================== +.. tags:: examples, preprocessing, evoked, meg, visualization + """ # Author: Mainak Jas # diff --git a/examples/preprocessing/virtual_evoked.py b/examples/preprocessing/virtual_evoked.py index 20f7527f1da..0acb668202f 100644 --- a/examples/preprocessing/virtual_evoked.py +++ b/examples/preprocessing/virtual_evoked.py @@ -5,6 +5,8 @@ Remap MEG channel types ======================= +.. tags:: examples, preprocessing, evoked, meg, visualization + In this example, MEG data are remapped from one channel type to another. This is useful to: diff --git a/examples/preprocessing/xdawn_denoising.py b/examples/preprocessing/xdawn_denoising.py index 20a6abc72fb..009fba3d11e 100644 --- a/examples/preprocessing/xdawn_denoising.py +++ b/examples/preprocessing/xdawn_denoising.py @@ -5,6 +5,8 @@ XDAWN Denoising =============== +.. tags:: examples, preprocessing, artifacts, meg, epochs + XDAWN filters are trained from epochs, signal is projected in the sources space and then projected back in the sensor space using only the first two XDAWN components. The process is similar to an ICA, but is diff --git a/examples/simulation/plot_stc_metrics.py b/examples/simulation/plot_stc_metrics.py index 8b481aed9e6..5b9b58c02b9 100644 --- a/examples/simulation/plot_stc_metrics.py +++ b/examples/simulation/plot_stc_metrics.py @@ -3,6 +3,8 @@ Compare simulated and estimated source activity =============================================== +.. tags:: examples, simulation, source-localization, inverse, statistics + This example illustrates how to compare the simulated and estimated source time courses (STC) by computing different metrics. Simulated source is a cortical region or dipole. 
It is meant to be a brief diff --git a/examples/simulation/simulate_evoked_data.py b/examples/simulation/simulate_evoked_data.py index 447f548e779..a92eff17aed 100644 --- a/examples/simulation/simulate_evoked_data.py +++ b/examples/simulation/simulate_evoked_data.py @@ -5,6 +5,8 @@ Generate simulated evoked data ============================== +.. tags:: examples, simulation, evoked, source-localization + Use :func:`~mne.simulation.simulate_sparse_stc` to simulate evoked data. """ # Author: Daniel Strohmeier diff --git a/examples/simulation/simulate_raw_data.py b/examples/simulation/simulate_raw_data.py index 0fbefca4480..bbcdd834ee3 100644 --- a/examples/simulation/simulate_raw_data.py +++ b/examples/simulation/simulate_raw_data.py @@ -5,6 +5,8 @@ Generate simulated raw data =========================== +.. tags:: examples, simulation, raw, forward-model + This example generates raw data by repeating a desired source activation multiple times. """ diff --git a/examples/simulation/simulated_raw_data_using_subject_anatomy.py b/examples/simulation/simulated_raw_data_using_subject_anatomy.py index ce6803e2ebe..94465f29f94 100644 --- a/examples/simulation/simulated_raw_data_using_subject_anatomy.py +++ b/examples/simulation/simulated_raw_data_using_subject_anatomy.py @@ -5,6 +5,8 @@ Simulate raw data using subject anatomy ======================================= +.. tags:: examples, simulation, raw, forward-model, inverse + This example illustrates how to generate source estimates and simulate raw data using subject anatomy with the :class:`mne.simulation.SourceSimulator` class. Once the raw data is simulated, generated source estimates are reconstructed diff --git a/examples/simulation/source_simulator.py b/examples/simulation/source_simulator.py index f417b96f181..1a96aef0419 100644 --- a/examples/simulation/source_simulator.py +++ b/examples/simulation/source_simulator.py @@ -5,6 +5,8 @@ Generate simulated source data ============================== +.. 
tags:: examples, simulation, source-localization, events, evoked + This example illustrates how to use the :class:`mne.simulation.SourceSimulator` class to generate source estimates and raw data. It is meant to be a brief introduction and only highlights the simplest use case. diff --git a/examples/stats/cluster_stats_evoked.py b/examples/stats/cluster_stats_evoked.py index b51601f2f32..b751f8b9785 100644 --- a/examples/stats/cluster_stats_evoked.py +++ b/examples/stats/cluster_stats_evoked.py @@ -5,6 +5,8 @@ Permutation F-test on sensor data with 1D cluster level ======================================================= +.. tags:: examples, statistics, cluster-permutation, evoked + One tests if the evoked response is significantly different between conditions. Multiple comparison problem is addressed with cluster level permutation test. diff --git a/examples/stats/fdr_stats_evoked.py b/examples/stats/fdr_stats_evoked.py index f7b78f7c559..4d932ae6754 100644 --- a/examples/stats/fdr_stats_evoked.py +++ b/examples/stats/fdr_stats_evoked.py @@ -5,6 +5,8 @@ FDR correction on T-test on sensor data ======================================= +.. tags:: examples, statistics, evoked, epochs, meg + One tests if the evoked response significantly deviates from 0. Multiple comparison problem is addressed with False Discovery Rate (FDR) correction. diff --git a/examples/stats/linear_regression_raw.py b/examples/stats/linear_regression_raw.py index 5c09e5a9443..9d6ff935c12 100644 --- a/examples/stats/linear_regression_raw.py +++ b/examples/stats/linear_regression_raw.py @@ -5,6 +5,8 @@ Regression on continuous data (rER[P/F]) ======================================== +.. tags:: examples, statistics, raw, evoked, meg + This demonstrates how rER[P/F]s - regressing the continuous data - is a generalisation of traditional averaging. 
If all preprocessing steps are the same, no overlap between epochs exists, and if all diff --git a/examples/stats/sensor_permutation_test.py b/examples/stats/sensor_permutation_test.py index ded8cb9c314..f084c7ac83c 100644 --- a/examples/stats/sensor_permutation_test.py +++ b/examples/stats/sensor_permutation_test.py @@ -5,6 +5,8 @@ Permutation T-test on sensor data ================================= +.. tags:: examples, statistics, meg, epochs, visualization + One tests if the signal significantly deviates from 0 during a fixed time window of interest. Here computation is performed on MNE sample dataset between 40 and 60 ms. diff --git a/examples/stats/sensor_regression.py b/examples/stats/sensor_regression.py index e3f1452badb..eae628f2c88 100644 --- a/examples/stats/sensor_regression.py +++ b/examples/stats/sensor_regression.py @@ -5,6 +5,8 @@ Analysing continuous features with binning and regression in sensor space ========================================================================= +.. tags:: examples, statistics, eeg, epochs, evoked + Predict single trial activity from a continuous variable. A single-trial regression is performed in each sensor and timepoint individually, resulting in an :class:`mne.Evoked` object which contains the diff --git a/examples/time_frequency/compute_csd.py b/examples/time_frequency/compute_csd.py index 7d46770c14c..07bdc205b5a 100644 --- a/examples/time_frequency/compute_csd.py +++ b/examples/time_frequency/compute_csd.py @@ -5,6 +5,8 @@ Compute a cross-spectral density (CSD) matrix ============================================= +.. tags:: examples, connectivity, time-frequency, beamforming + A cross-spectral density (CSD) matrix is similar to a covariance matrix, but in the time-frequency domain. It is the first step towards computing sensor-to-sensor coherence or a DICS beamformer. 
diff --git a/examples/time_frequency/compute_source_psd_epochs.py b/examples/time_frequency/compute_source_psd_epochs.py index 21901e82eaa..dbc64a8456b 100644 --- a/examples/time_frequency/compute_source_psd_epochs.py +++ b/examples/time_frequency/compute_source_psd_epochs.py @@ -5,6 +5,8 @@ Compute Power Spectral Density of inverse solution from single epochs ===================================================================== +.. tags:: examples, time-frequency, inverse, source-localization, epochs + Compute PSD of dSPM inverse solution on single trial epochs restricted to a brain label. The PSD is computed using a multi-taper method with Discrete Prolate Spheroidal Sequence (DPSS) windows. diff --git a/examples/time_frequency/source_label_time_frequency.py b/examples/time_frequency/source_label_time_frequency.py index 80a25fffab9..0f2e76596e6 100644 --- a/examples/time_frequency/source_label_time_frequency.py +++ b/examples/time_frequency/source_label_time_frequency.py @@ -5,6 +5,8 @@ Compute power and phase lock in label of the source space ========================================================= +.. tags:: examples, time-frequency, source-localization, inverse, epochs + Compute time-frequency maps of power and phase lock in the source space. The inverse method is linear based on dSPM inverse operator. diff --git a/examples/time_frequency/source_power_spectrum.py b/examples/time_frequency/source_power_spectrum.py index 77af97a8726..a9f2a42f7da 100644 --- a/examples/time_frequency/source_power_spectrum.py +++ b/examples/time_frequency/source_power_spectrum.py @@ -5,6 +5,8 @@ Compute source power spectral density (PSD) in a label ====================================================== +.. tags:: examples, time-frequency, source-localization, inverse, meg + Returns an STC file containing the PSD (in dB) of each of the sources within a label. 
""" diff --git a/examples/time_frequency/source_power_spectrum_opm.py b/examples/time_frequency/source_power_spectrum_opm.py index ae8152670f2..eb7f33f8fd8 100644 --- a/examples/time_frequency/source_power_spectrum_opm.py +++ b/examples/time_frequency/source_power_spectrum_opm.py @@ -5,6 +5,8 @@ Compute source power spectral density (PSD) of VectorView and OPM data ====================================================================== +.. tags:: examples, time-frequency, source-localization, inverse, meg + Here we compute the resting state from raw for data recorded using a Neuromag VectorView system and a custom OPM system. The pipeline is meant to mostly follow the Brainstorm :footcite:`TadelEtAl2011` diff --git a/examples/time_frequency/source_space_time_frequency.py b/examples/time_frequency/source_space_time_frequency.py index c5ca425dd4c..5dda356755b 100644 --- a/examples/time_frequency/source_space_time_frequency.py +++ b/examples/time_frequency/source_space_time_frequency.py @@ -5,6 +5,8 @@ Compute induced power in the source space with dSPM =================================================== +.. tags:: examples, time-frequency, source-localization, inverse, epochs + Returns STC files ie source estimates of induced power for different bands in the source space. The inverse method is linear based on dSPM inverse operator. diff --git a/examples/time_frequency/temporal_whitening.py b/examples/time_frequency/temporal_whitening.py index 3a0a04c01c5..b25a686c191 100644 --- a/examples/time_frequency/temporal_whitening.py +++ b/examples/time_frequency/temporal_whitening.py @@ -5,6 +5,8 @@ Temporal whitening with AR model ================================ +.. tags:: examples, time-frequency, preprocessing, meg, visualization + Here we fit an AR model to the data and use it to temporally whiten the signals. 
diff --git a/examples/time_frequency/time_frequency_erds.py b/examples/time_frequency/time_frequency_erds.py index 93272eb7aa3..794e56cad4c 100644 --- a/examples/time_frequency/time_frequency_erds.py +++ b/examples/time_frequency/time_frequency_erds.py @@ -5,6 +5,8 @@ Compute and visualize ERDS maps =============================== +.. tags:: examples, time-frequency, statistics, eeg, epochs + This example calculates and displays ERDS maps of event-related EEG data. ERDS (sometimes also written as ERD/ERS) is short for event-related desynchronization (ERD) and event-related synchronization (ERS) diff --git a/examples/time_frequency/time_frequency_global_field_power.py b/examples/time_frequency/time_frequency_global_field_power.py index cc4ff14ce2a..69b1c3c9290 100644 --- a/examples/time_frequency/time_frequency_global_field_power.py +++ b/examples/time_frequency/time_frequency_global_field_power.py @@ -5,6 +5,8 @@ Explore event-related dynamics for specific frequency bands =========================================================== +.. tags:: examples, time-frequency, evoked, meg, epochs + The objective is to show you how to explore spectrally localized effects. For this purpose we adapt the method described in :footcite:`HariSalmelin1997` and use it on the somato dataset. diff --git a/examples/time_frequency/time_frequency_simulated.py b/examples/time_frequency/time_frequency_simulated.py index dc42f16da3a..f6ca3488d92 100644 --- a/examples/time_frequency/time_frequency_simulated.py +++ b/examples/time_frequency/time_frequency_simulated.py @@ -5,6 +5,8 @@ Time-frequency on simulated data (Multitaper vs. Morlet vs. Stockwell vs. Hilbert) ================================================================================== +.. tags:: examples, time-frequency, evoked, epochs, meg + This example demonstrates the different time-frequency estimation methods on simulated data. It shows the time-frequency resolution trade-off and the problem of estimation variance. 
In addition it highlights diff --git a/examples/visualization/3d_to_2d.py b/examples/visualization/3d_to_2d.py index 47b223e8396..482ac63d4ae 100644 --- a/examples/visualization/3d_to_2d.py +++ b/examples/visualization/3d_to_2d.py @@ -5,6 +5,8 @@ How to convert 3D electrode positions to a 2D image =================================================== +.. tags:: examples, visualization, clinical + Sometimes we want to convert a 3D representation of electrodes into a 2D image. For example, if we are using electrocorticography it is common to create scatterplots on top of a brain, with each point representing an electrode. diff --git a/examples/visualization/brain.py b/examples/visualization/brain.py index 8e7c8226449..87256793cec 100644 --- a/examples/visualization/brain.py +++ b/examples/visualization/brain.py @@ -5,6 +5,8 @@ Plotting with ``mne.viz.Brain`` =============================== +.. tags:: examples, visualization, source-localization, freesurfer + In this example, we'll show how to use :class:`mne.viz.Brain`. """ # Author: Alex Rockhill diff --git a/examples/visualization/channel_epochs_image.py b/examples/visualization/channel_epochs_image.py index 9281270a8c1..4331ab87cbc 100644 --- a/examples/visualization/channel_epochs_image.py +++ b/examples/visualization/channel_epochs_image.py @@ -5,6 +5,8 @@ Visualize channel over epochs as an image ========================================= +.. tags:: examples, visualization, epochs, meg, evoked + This will produce what is sometimes called an event related potential / field (ERP/ERF) image. diff --git a/examples/visualization/eeg_on_scalp.py b/examples/visualization/eeg_on_scalp.py index 1146ae229ef..d43fa879099 100644 --- a/examples/visualization/eeg_on_scalp.py +++ b/examples/visualization/eeg_on_scalp.py @@ -5,6 +5,8 @@ Plotting EEG sensors on the scalp ================================= +.. tags:: examples, visualization, eeg + In this example, digitized EEG sensor locations are shown on the scalp surface. 
""" # Author: Eric Larson diff --git a/examples/visualization/evoked_arrowmap.py b/examples/visualization/evoked_arrowmap.py index bdad8d4ad79..25efa4baa1f 100644 --- a/examples/visualization/evoked_arrowmap.py +++ b/examples/visualization/evoked_arrowmap.py @@ -5,6 +5,8 @@ Plotting topographic arrowmaps of evoked data ============================================= +.. tags:: examples, visualization, evoked, meg + Load evoked data and plot arrowmaps along with the topomap for selected time points. An arrowmap is based upon the Hosaka-Cohen transformation and represents an estimation of the current flow underneath the MEG sensors. diff --git a/examples/visualization/evoked_topomap.py b/examples/visualization/evoked_topomap.py index 53b7a60dbba..796c8895efe 100644 --- a/examples/visualization/evoked_topomap.py +++ b/examples/visualization/evoked_topomap.py @@ -5,6 +5,8 @@ Plotting topographic maps of evoked data ======================================== +.. tags:: examples, visualization, evoked, eeg, meg + Load evoked data and plot topomaps for selected time points using multiple additional options. """ diff --git a/examples/visualization/evoked_whitening.py b/examples/visualization/evoked_whitening.py index 4bcb4bc8c04..00dda9221bb 100644 --- a/examples/visualization/evoked_whitening.py +++ b/examples/visualization/evoked_whitening.py @@ -5,6 +5,8 @@ Whitening evoked data with a noise covariance ============================================= +.. tags:: examples, visualization, evoked, preprocessing, meg + Evoked data are loaded and then whitened using a given noise covariance matrix. It's an excellent quality check to see if baseline signals match the assumption of Gaussian white noise during the baseline period. 
diff --git a/examples/visualization/eyetracking_plot_heatmap.py b/examples/visualization/eyetracking_plot_heatmap.py index 07983685b5e..fa580bca851 100644 --- a/examples/visualization/eyetracking_plot_heatmap.py +++ b/examples/visualization/eyetracking_plot_heatmap.py @@ -5,6 +5,8 @@ Plotting eye-tracking heatmaps in MNE-Python ============================================= +.. tags:: examples, visualization, preprocessing, epochs + This tutorial covers plotting eye-tracking position data as a heatmap. .. seealso:: diff --git a/examples/visualization/meg_sensors.py b/examples/visualization/meg_sensors.py index 182a8ee8940..78b062fe44a 100644 --- a/examples/visualization/meg_sensors.py +++ b/examples/visualization/meg_sensors.py @@ -5,6 +5,8 @@ Plotting sensor layouts of MEG systems ====================================== +.. tags:: examples, visualization, meg + Show sensor layouts of different MEG systems. """ # Author: Eric Larson diff --git a/examples/visualization/mne_helmet.py b/examples/visualization/mne_helmet.py index ceb149d77ba..969da6b8af0 100644 --- a/examples/visualization/mne_helmet.py +++ b/examples/visualization/mne_helmet.py @@ -5,6 +5,8 @@ Plot the MNE brain and helmet ============================= +.. tags:: examples, visualization, meg, evoked + This tutorial shows how to make the MNE helmet + brain image. """ diff --git a/examples/visualization/parcellation.py b/examples/visualization/parcellation.py index d92a849b970..a254600b887 100644 --- a/examples/visualization/parcellation.py +++ b/examples/visualization/parcellation.py @@ -5,6 +5,8 @@ Plot a cortical parcellation ============================ +.. tags:: examples, visualization, source-localization + In this example, we download the HCP-MMP1.0 parcellation :footcite:`GlasserEtAl2016` and show it on ``fsaverage``. 
We will also download the customized 448-label aparc diff --git a/examples/visualization/roi_erpimage_by_rt.py b/examples/visualization/roi_erpimage_by_rt.py index f9cd9f708cf..fc32685a1d0 100644 --- a/examples/visualization/roi_erpimage_by_rt.py +++ b/examples/visualization/roi_erpimage_by_rt.py @@ -5,6 +5,8 @@ Plot single trial activity, grouped by ROI and sorted by RT =========================================================== +.. tags:: examples, visualization, epochs, eeg, events + This will produce what is sometimes called an event related potential / field (ERP/ERF) image. diff --git a/examples/visualization/ssp_projs_sensitivity_map.py b/examples/visualization/ssp_projs_sensitivity_map.py index 65a96cd9908..f580717e469 100644 --- a/examples/visualization/ssp_projs_sensitivity_map.py +++ b/examples/visualization/ssp_projs_sensitivity_map.py @@ -5,6 +5,8 @@ Sensitivity map of SSP projections ================================== +.. tags:: examples, visualization, artifacts, forward-model + This example shows the sources that have a forward field similar to the first SSP vector correcting for ECG. """ diff --git a/examples/visualization/topo_compare_conditions.py b/examples/visualization/topo_compare_conditions.py index eb2699eb262..a2d05070478 100644 --- a/examples/visualization/topo_compare_conditions.py +++ b/examples/visualization/topo_compare_conditions.py @@ -5,6 +5,8 @@ Compare evoked responses for different conditions ================================================= +.. tags:: examples, visualization, evoked, epochs, meg + In this example, an Epochs object for visual and auditory responses is created. Both conditions are then accessed by their respective names to create a sensor layout plot of the related evoked responses. 
diff --git a/examples/visualization/topo_customized.py b/examples/visualization/topo_customized.py index 2303961f9da..c4b33ae2d4b 100644 --- a/examples/visualization/topo_customized.py +++ b/examples/visualization/topo_customized.py @@ -5,6 +5,8 @@ Plot custom topographies for MEG sensors ======================================== +.. tags:: examples, visualization, time-frequency, meg + This example exposes the :func:`~mne.viz.iter_topography` function that makes it very easy to generate custom sensor topography plots. Here we will plot the power spectrum of each channel on a topographic diff --git a/examples/visualization/xhemi.py b/examples/visualization/xhemi.py index e0974a30a58..a9c6529b615 100644 --- a/examples/visualization/xhemi.py +++ b/examples/visualization/xhemi.py @@ -5,6 +5,8 @@ Cross-hemisphere comparison =========================== +.. tags:: examples, visualization, source-localization + This example illustrates how to visualize the difference between activity in the left and the right hemisphere. The data from the right hemisphere is mapped to the left hemisphere, and then the difference is plotted. For more diff --git a/pyproject.toml b/pyproject.toml index 53248fe8dd2..8492985492e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ doc = [ "sphinx >= 6", "sphinx-design", "sphinx-gallery >= 0.16", + "sphinx-tags", "sphinx_copybutton", "sphinxcontrib-bibtex >= 2.5", "sphinxcontrib-towncrier >= 0.5.0a0", @@ -321,6 +322,7 @@ ignore_directives = [ "minigallery", "related-software", "rst-class", + "tags", "tab-set", "tabularcolumns", "toctree", diff --git a/tutorials/clinical/20_seeg.py b/tutorials/clinical/20_seeg.py index ea56ea8a688..af3cc4c6ba9 100644 --- a/tutorials/clinical/20_seeg.py +++ b/tutorials/clinical/20_seeg.py @@ -5,6 +5,8 @@ Working with sEEG data ====================== +.. tags:: tutorials, clinical, source-localization, visualization + MNE-Python supports working with more than just MEG and EEG data. 
Here we show some of the functions that can be used to facilitate working with stereoelectroencephalography (sEEG) data. diff --git a/tutorials/clinical/30_ecog.py b/tutorials/clinical/30_ecog.py index 4994a23d686..740924ed1f9 100644 --- a/tutorials/clinical/30_ecog.py +++ b/tutorials/clinical/30_ecog.py @@ -5,6 +5,8 @@ Working with ECoG data ====================== +.. tags:: tutorials, clinical, visualization, preprocessing + MNE supports working with more than just MEG and EEG data. Here we show some of the functions that can be used to facilitate working with electrocorticography (ECoG) data. diff --git a/tutorials/clinical/60_sleep.py b/tutorials/clinical/60_sleep.py index e50b0740a7c..faa8fa36d6f 100644 --- a/tutorials/clinical/60_sleep.py +++ b/tutorials/clinical/60_sleep.py @@ -4,6 +4,8 @@ Sleep stage classification from polysomnography (PSG) data ========================================================== +.. tags:: tutorials, clinical, classification, machine-learning, epochs, eeg + .. note:: This code is taken from the analysis code used in :footcite:`ChambonEtAl2018`. If you reuse this code please consider citing this work. diff --git a/tutorials/epochs/10_epochs_overview.py b/tutorials/epochs/10_epochs_overview.py index 7d92da73a77..6e15e042417 100644 --- a/tutorials/epochs/10_epochs_overview.py +++ b/tutorials/epochs/10_epochs_overview.py @@ -5,6 +5,8 @@ The Epochs data structure: discontinuous data ============================================= +.. tags:: tutorials, epochs, events, beginners + This tutorial covers the basics of creating and working with :term:`epoched ` data. 
It introduces the :class:`~mne.Epochs` data structure in detail, including how to load, query, subselect, export, and plot data from an diff --git a/tutorials/epochs/15_baseline_regression.py b/tutorials/epochs/15_baseline_regression.py index 326e075dd04..bde924f5663 100644 --- a/tutorials/epochs/15_baseline_regression.py +++ b/tutorials/epochs/15_baseline_regression.py @@ -5,6 +5,8 @@ Regression-based baseline correction ==================================================================== +.. tags:: tutorials, epochs, preprocessing, statistics, eeg + This tutorial compares traditional baseline correction (adding or subtracting a scalar amount from every timepoint in an epoch) to a regression-based approach to baseline correction (which allows the effect of the baseline period to vary diff --git a/tutorials/epochs/20_visualize_epochs.py b/tutorials/epochs/20_visualize_epochs.py index 04eca026b7e..e692a174de2 100644 --- a/tutorials/epochs/20_visualize_epochs.py +++ b/tutorials/epochs/20_visualize_epochs.py @@ -5,6 +5,8 @@ Visualizing epoched data ======================== +.. tags:: tutorials, epochs, visualization, time-frequency + This tutorial shows how to plot epoched data as time series, how to plot the spectral density of epoched data, how to plot epochs as an imagemap, and how to plot the sensor locations and projectors stored in `~mne.Epochs` objects. diff --git a/tutorials/epochs/30_epochs_metadata.py b/tutorials/epochs/30_epochs_metadata.py index a9019318034..6176ba845fd 100644 --- a/tutorials/epochs/30_epochs_metadata.py +++ b/tutorials/epochs/30_epochs_metadata.py @@ -5,6 +5,8 @@ Working with Epoch metadata =========================== +.. tags:: tutorials, epochs, eeg, visualization + This tutorial shows how to add metadata to `~mne.Epochs` objects, and how to use :ref:`Pandas query strings ` to select and plot epochs based on metadata properties. 
diff --git a/tutorials/epochs/40_autogenerate_metadata.py b/tutorials/epochs/40_autogenerate_metadata.py index 5e3b8a05eb1..7255c13acbf 100644 --- a/tutorials/epochs/40_autogenerate_metadata.py +++ b/tutorials/epochs/40_autogenerate_metadata.py @@ -5,6 +5,8 @@ Auto-generating Epochs metadata =============================== +.. tags:: tutorials, epochs, events, eeg, evoked + This tutorial shows how to auto-generate metadata for `~mne.Epochs`, based on events via `mne.epochs.make_metadata`. diff --git a/tutorials/epochs/50_epochs_to_data_frame.py b/tutorials/epochs/50_epochs_to_data_frame.py index a6c5558fb2b..4ed9653fac6 100644 --- a/tutorials/epochs/50_epochs_to_data_frame.py +++ b/tutorials/epochs/50_epochs_to_data_frame.py @@ -5,6 +5,8 @@ Exporting Epochs to Pandas DataFrames ===================================== +.. tags:: tutorials, epochs, meg, eeg, visualization + This tutorial shows how to export the data in :class:`~mne.Epochs` objects to a :class:`Pandas DataFrame `, and applies a typical Pandas :doc:`split-apply-combine ` workflow to examine the diff --git a/tutorials/epochs/60_make_fixed_length_epochs.py b/tutorials/epochs/60_make_fixed_length_epochs.py index 10b8c12ea19..7a9e95170af 100644 --- a/tutorials/epochs/60_make_fixed_length_epochs.py +++ b/tutorials/epochs/60_make_fixed_length_epochs.py @@ -4,6 +4,8 @@ Divide continuous data into equally-spaced epochs ================================================= +.. tags:: tutorials, epochs, raw, connectivity, meg + This tutorial shows how to segment continuous data into a set of epochs spaced equidistantly in time. 
The epochs will not be created based on experimental events; instead, the continuous data will be "chunked" into consecutive epochs (which may be diff --git a/tutorials/evoked/10_evoked_overview.py b/tutorials/evoked/10_evoked_overview.py index b251a1f8239..24b0d7a4cf5 100644 --- a/tutorials/evoked/10_evoked_overview.py +++ b/tutorials/evoked/10_evoked_overview.py @@ -5,6 +5,8 @@ The Evoked data structure: evoked/averaged data =============================================== +.. tags:: tutorials, evoked, epochs, beginners + This tutorial covers the basics of creating and working with :term:`evoked` data. It introduces the :class:`~mne.Evoked` data structure in detail, including how to load, query, subset, export, and plot data from an :class:`~mne.Evoked` object. For details diff --git a/tutorials/evoked/20_visualize_evoked.py b/tutorials/evoked/20_visualize_evoked.py index 3a91c38bb3d..fa12ee8e154 100644 --- a/tutorials/evoked/20_visualize_evoked.py +++ b/tutorials/evoked/20_visualize_evoked.py @@ -5,6 +5,8 @@ Visualizing Evoked data ======================= +.. tags:: tutorials, evoked, visualization, meg + This tutorial shows the different visualization methods for :class:`~mne.Evoked` objects. diff --git a/tutorials/evoked/30_eeg_erp.py b/tutorials/evoked/30_eeg_erp.py index 63fe9126618..be6a22576db 100644 --- a/tutorials/evoked/30_eeg_erp.py +++ b/tutorials/evoked/30_eeg_erp.py @@ -5,6 +5,8 @@ EEG analysis - Event-Related Potentials (ERPs) ============================================== +.. tags:: tutorials, evoked, eeg, epochs, preprocessing + This tutorial shows how to perform standard ERP analyses in MNE-Python. 
Most of the material here is covered in other tutorials too, but for convenience the functions and methods most useful for ERP analyses are collected here, with diff --git a/tutorials/evoked/40_whitened.py b/tutorials/evoked/40_whitened.py index a3110139b4e..406c87ddaab 100644 --- a/tutorials/evoked/40_whitened.py +++ b/tutorials/evoked/40_whitened.py @@ -5,6 +5,8 @@ Plotting whitened data ====================== +.. tags:: tutorials, evoked, visualization, preprocessing, epochs + This tutorial demonstrates how to plot :term:`whitened ` evoked data. Data are whitened for many processes, including dipole fitting, source diff --git a/tutorials/forward/10_background_freesurfer.py b/tutorials/forward/10_background_freesurfer.py index 80ffe009b3b..f38be532878 100644 --- a/tutorials/forward/10_background_freesurfer.py +++ b/tutorials/forward/10_background_freesurfer.py @@ -5,6 +5,8 @@ FreeSurfer MRI reconstruction ============================= +.. tags:: tutorials, freesurfer, forward-model, source-localization + This tutorial covers how to use FreeSurfer alongside MNE-Python, to handle the structural MRI data that we use to build subject-specific anatomical models of the scalp, inner/outer skull, and cortical surface. diff --git a/tutorials/forward/20_source_alignment.py b/tutorials/forward/20_source_alignment.py index cf80cc2d4c2..43b2919175a 100644 --- a/tutorials/forward/20_source_alignment.py +++ b/tutorials/forward/20_source_alignment.py @@ -5,6 +5,8 @@ Source alignment and coordinate frames ====================================== +.. tags:: tutorials, forward-model, visualization, meg + This tutorial shows how to visually assess the spatial alignment of MEG sensor locations, digitized scalp landmark and sensor locations, and MRI volumes. 
This alignment process is crucial for computing the forward solution, as is diff --git a/tutorials/forward/25_automated_coreg.py b/tutorials/forward/25_automated_coreg.py index b25da7a083a..ecd07153e80 100644 --- a/tutorials/forward/25_automated_coreg.py +++ b/tutorials/forward/25_automated_coreg.py @@ -5,6 +5,8 @@ Using an automated approach to coregistration ============================================= +.. tags:: tutorials, forward-model, meg, visualization + This example shows how to use the coregistration functions to perform an automated MEG-MRI coregistration via scripting. Generally the results of this approach are consistent with those obtained from manual diff --git a/tutorials/forward/30_forward.py b/tutorials/forward/30_forward.py index ef39786cdf5..580c2938c0a 100644 --- a/tutorials/forward/30_forward.py +++ b/tutorials/forward/30_forward.py @@ -5,6 +5,8 @@ Head model and forward computation ================================== +.. tags:: tutorials, forward-model, bem, source-localization + The aim of this tutorial is to be a getting started for forward computation. For more extensive details and presentation of the general concepts for forward diff --git a/tutorials/forward/35_eeg_no_mri.py b/tutorials/forward/35_eeg_no_mri.py index 9324898ccf5..0c65abe7508 100644 --- a/tutorials/forward/35_eeg_no_mri.py +++ b/tutorials/forward/35_eeg_no_mri.py @@ -5,6 +5,8 @@ EEG forward operator with a template MRI ======================================== +.. tags:: tutorials, eeg, forward-model, bem, source-localization + This tutorial explains how to compute the forward operator from EEG data using the standard template MRI subject :ref:`fsaverage `. 
diff --git a/tutorials/forward/50_background_freesurfer_mne.py b/tutorials/forward/50_background_freesurfer_mne.py index 57493d14b70..ed30774369a 100644 --- a/tutorials/forward/50_background_freesurfer_mne.py +++ b/tutorials/forward/50_background_freesurfer_mne.py @@ -5,6 +5,8 @@ How MNE uses FreeSurfer's outputs ================================= +.. tags:: tutorials, freesurfer, source-localization + This tutorial explains how MRI coordinate frames are handled in MNE-Python, and how MNE-Python integrates with FreeSurfer for handling MRI data and source space data in general. diff --git a/tutorials/forward/80_fix_bem_in_blender.py b/tutorials/forward/80_fix_bem_in_blender.py index 948aaa3653b..2e30dfdd26e 100644 --- a/tutorials/forward/80_fix_bem_in_blender.py +++ b/tutorials/forward/80_fix_bem_in_blender.py @@ -5,6 +5,8 @@ Fixing BEM and head surfaces ============================ +.. tags:: tutorials, forward-model, bem + Sometimes when creating a BEM model the surfaces need manual correction because of a series of problems that can arise (e.g. intersection between surfaces). Here, we will see how this can be achieved by exporting the surfaces to the 3D diff --git a/tutorials/forward/90_compute_covariance.py b/tutorials/forward/90_compute_covariance.py index 37c2329f439..9628ac07a41 100644 --- a/tutorials/forward/90_compute_covariance.py +++ b/tutorials/forward/90_compute_covariance.py @@ -5,6 +5,8 @@ Computing a covariance matrix ============================= +.. tags:: tutorials, forward-model, covariance, inverse + Many methods in MNE, including source estimation and some classification algorithms, require covariance estimations from the recordings. 
In this tutorial we cover the basics of sensor covariance computations and construct a noise covariance matrix that can be diff --git a/tutorials/intro/10_overview.py b/tutorials/intro/10_overview.py index f61745b0024..99cff5f7a27 100644 --- a/tutorials/intro/10_overview.py +++ b/tutorials/intro/10_overview.py @@ -5,6 +5,8 @@ Overview of MEG/EEG analysis with MNE-Python ============================================ +.. tags:: tutorials, beginners, meg, eeg, workflow + This tutorial covers the basic EEG/MEG pipeline for event-related analysis: loading data, epoching, averaging, plotting, and estimating cortical activity from sensor data. It introduces the core MNE-Python data structures `~mne.io.Raw`, `~mne.Epochs`, diff --git a/tutorials/intro/15_inplace.py b/tutorials/intro/15_inplace.py index e9cbd4769f1..e3ed4e330e6 100644 --- a/tutorials/intro/15_inplace.py +++ b/tutorials/intro/15_inplace.py @@ -5,6 +5,8 @@ Modifying data in-place ======================= +.. tags:: tutorials, beginners, in-place, preprocessing, workflow + Many of MNE-Python's data objects (`~mne.io.Raw`, `~mne.Epochs`, `~mne.Evoked`, etc) have methods that modify the data in-place (either optionally or obligatorily). This can be advantageous when working with large datasets because it reduces the amount of diff --git a/tutorials/intro/20_events_from_raw.py b/tutorials/intro/20_events_from_raw.py index 2c368646908..57863afbec9 100644 --- a/tutorials/intro/20_events_from_raw.py +++ b/tutorials/intro/20_events_from_raw.py @@ -5,6 +5,8 @@ Parsing events from raw data ============================ +.. tags:: tutorials, beginners, raw, events, annotations + This tutorial describes how to read experimental events from raw recordings, and how to convert between the two different representations of events within MNE-Python (Events arrays and Annotations objects). 
diff --git a/tutorials/intro/30_info.py b/tutorials/intro/30_info.py index c549688a8a9..fb706607243 100644 --- a/tutorials/intro/30_info.py +++ b/tutorials/intro/30_info.py @@ -5,6 +5,8 @@ The Info data structure ======================= +.. tags:: tutorials, beginners, raw, epochs, evoked + This tutorial describes the :class:`mne.Info` data structure, which keeps track of various recording details, and is attached to :class:`~mne.io.Raw`, :class:`~mne.Epochs`, and :class:`~mne.Evoked` objects. diff --git a/tutorials/intro/40_sensor_locations.py b/tutorials/intro/40_sensor_locations.py index f48bbcea382..0e12ecb8d73 100644 --- a/tutorials/intro/40_sensor_locations.py +++ b/tutorials/intro/40_sensor_locations.py @@ -5,6 +5,8 @@ Working with sensor locations ============================= +.. tags:: tutorials, beginners, sensor-locations, eeg, visualization + This tutorial describes how to read and plot sensor locations, and how MNE-Python handles physical locations of sensors. As usual we'll start by importing the modules we need: diff --git a/tutorials/intro/50_configure_mne.py b/tutorials/intro/50_configure_mne.py index 9e6896eaf98..3c0cd677751 100644 --- a/tutorials/intro/50_configure_mne.py +++ b/tutorials/intro/50_configure_mne.py @@ -5,6 +5,8 @@ Configuring MNE-Python ====================== +.. tags:: tutorials, beginners, workflow + This tutorial covers how to configure MNE-Python to suit your local system and your analysis preferences. diff --git a/tutorials/intro/70_report.py b/tutorials/intro/70_report.py index 31133610e2c..77821f757d5 100644 --- a/tutorials/intro/70_report.py +++ b/tutorials/intro/70_report.py @@ -5,6 +5,8 @@ Getting started with mne.Report =============================== +.. tags:: tutorials, beginners, report, quality-control + :class:`mne.Report` is a way to create interactive HTML summaries of your data. These reports can show many different visualizations for one or multiple participants. 
A common use case is creating diagnostic summaries to check data quality at different diff --git a/tutorials/inverse/10_stc_class.py b/tutorials/inverse/10_stc_class.py index 4c10fb05446..56c235e4cc1 100644 --- a/tutorials/inverse/10_stc_class.py +++ b/tutorials/inverse/10_stc_class.py @@ -5,6 +5,8 @@ The SourceEstimate data structure ================================= +.. tags:: tutorials, inverse, source-localization + Source estimates, commonly referred to as :term:`STC (Source Time Courses) `, are obtained from source localization methods. diff --git a/tutorials/inverse/20_dipole_fit.py b/tutorials/inverse/20_dipole_fit.py index e72e76dd0fd..a5e876d6ad9 100644 --- a/tutorials/inverse/20_dipole_fit.py +++ b/tutorials/inverse/20_dipole_fit.py @@ -5,6 +5,8 @@ Source localization with equivalent current dipole (ECD) fit ============================================================ +.. tags:: tutorials, inverse, source-localization + This shows how to fit a dipole :footcite:`Sarvas1987` using MNE-Python. For a comparison of fits between MNE-C and MNE-Python, see diff --git a/tutorials/inverse/30_mne_dspm_loreta.py b/tutorials/inverse/30_mne_dspm_loreta.py index 923ee534d71..f0bc570e5ab 100644 --- a/tutorials/inverse/30_mne_dspm_loreta.py +++ b/tutorials/inverse/30_mne_dspm_loreta.py @@ -5,6 +5,8 @@ Source localization with MNE, dSPM, sLORETA, and eLORETA ======================================================== +.. tags:: tutorials, inverse, source-localization, evoked + The aim of this tutorial is to teach you how to compute and apply a linear minimum-norm inverse method on evoked/raw/epochs data. 
""" diff --git a/tutorials/inverse/35_dipole_orientations.py b/tutorials/inverse/35_dipole_orientations.py index c145b404825..7fbb829eee7 100644 --- a/tutorials/inverse/35_dipole_orientations.py +++ b/tutorials/inverse/35_dipole_orientations.py @@ -5,6 +5,8 @@ The role of dipole orientations in distributed source localization ================================================================== +.. tags:: tutorials, inverse, source-localization + When performing source localization in a distributed manner (i.e., using MNE/dSPM/sLORETA/eLORETA), the source space is defined as a grid of dipoles that spans a large portion of diff --git a/tutorials/inverse/40_mne_fixed_free.py b/tutorials/inverse/40_mne_fixed_free.py index 52abb262345..e55651c4332 100644 --- a/tutorials/inverse/40_mne_fixed_free.py +++ b/tutorials/inverse/40_mne_fixed_free.py @@ -5,6 +5,8 @@ Computing various MNE solutions =============================== +.. tags:: tutorials, inverse, source-localization + This example shows example fixed- and free-orientation source localizations produced by the minimum-norm variants implemented in MNE-Python: MNE, dSPM, sLORETA, and eLORETA. diff --git a/tutorials/inverse/50_beamformer_lcmv.py b/tutorials/inverse/50_beamformer_lcmv.py index 7f84476dc3c..50ab88f5e59 100644 --- a/tutorials/inverse/50_beamformer_lcmv.py +++ b/tutorials/inverse/50_beamformer_lcmv.py @@ -5,6 +5,8 @@ Source reconstruction using an LCMV beamformer ============================================== +.. tags:: tutorials, beamforming, inverse, source-localization + This tutorial gives an overview of the beamformer method and shows how to reconstruct source activity using an LCMV beamformer. """ diff --git a/tutorials/inverse/60_visualize_stc.py b/tutorials/inverse/60_visualize_stc.py index 78301c4ee88..1211eb0ab6d 100644 --- a/tutorials/inverse/60_visualize_stc.py +++ b/tutorials/inverse/60_visualize_stc.py @@ -5,6 +5,8 @@ Visualize source time courses (stcs) ==================================== +.. 
tags:: tutorials, inverse, visualization, source-localization + This tutorial focuses on visualization of :term:`source estimates `. Surface Source Estimates diff --git a/tutorials/inverse/70_eeg_mri_coords.py b/tutorials/inverse/70_eeg_mri_coords.py index 20d6b62e4c9..7a17c2b27d4 100644 --- a/tutorials/inverse/70_eeg_mri_coords.py +++ b/tutorials/inverse/70_eeg_mri_coords.py @@ -5,6 +5,8 @@ EEG source localization given electrode locations on an MRI =========================================================== +.. tags:: tutorials, inverse, eeg, forward-model, source-localization + This tutorial explains how to compute the forward operator from EEG data when the electrodes are in MRI voxel coordinates. """ diff --git a/tutorials/inverse/80_brainstorm_phantom_elekta.py b/tutorials/inverse/80_brainstorm_phantom_elekta.py index c76a5c5f569..8ed47eebe3f 100644 --- a/tutorials/inverse/80_brainstorm_phantom_elekta.py +++ b/tutorials/inverse/80_brainstorm_phantom_elekta.py @@ -5,6 +5,8 @@ Brainstorm Elekta phantom dataset tutorial ========================================== +.. tags:: tutorials, inverse, source-localization + Here we compute the evoked from raw for the Brainstorm Elekta phantom tutorial dataset. For comparison, see :footcite:`TadelEtAl2011` and `the original Brainstorm tutorial diff --git a/tutorials/inverse/85_brainstorm_phantom_ctf.py b/tutorials/inverse/85_brainstorm_phantom_ctf.py index 1c0312e4f29..543a8e98a5f 100644 --- a/tutorials/inverse/85_brainstorm_phantom_ctf.py +++ b/tutorials/inverse/85_brainstorm_phantom_ctf.py @@ -5,6 +5,8 @@ Brainstorm CTF phantom dataset tutorial ======================================= +.. tags:: tutorials, inverse, source-localization + Here we compute the evoked from raw for the Brainstorm CTF phantom tutorial dataset. 
For comparison, see :footcite:`TadelEtAl2011` and: diff --git a/tutorials/inverse/90_phantom_4DBTi.py b/tutorials/inverse/90_phantom_4DBTi.py index 5c1230b049f..462047ce4fb 100644 --- a/tutorials/inverse/90_phantom_4DBTi.py +++ b/tutorials/inverse/90_phantom_4DBTi.py @@ -5,6 +5,8 @@ 4D Neuroimaging/BTi phantom dataset tutorial ============================================ +.. tags:: tutorials, inverse, source-localization + Here we read 4DBTi epochs data obtained with a spherical phantom using four different dipole locations. For each condition we compute evoked data and compute dipole fits. diff --git a/tutorials/inverse/95_phantom_KIT.py b/tutorials/inverse/95_phantom_KIT.py index 75e0025a9c2..625faecabea 100644 --- a/tutorials/inverse/95_phantom_KIT.py +++ b/tutorials/inverse/95_phantom_KIT.py @@ -5,6 +5,8 @@ KIT phantom dataset tutorial ============================ +.. tags:: tutorials, inverse, source-localization + Here we read KIT data obtained from a phantom with 49 dipoles sequentially activated with 2-cycle 11 Hz sinusoidal bursts to verify source localization accuracy. """ diff --git a/tutorials/io/10_reading_meg_data.py b/tutorials/io/10_reading_meg_data.py index 90ad8a44693..feb63a4b105 100644 --- a/tutorials/io/10_reading_meg_data.py +++ b/tutorials/io/10_reading_meg_data.py @@ -9,6 +9,8 @@ Importing data from MEG devices =============================== +.. tags:: tutorials, io, raw, meg + This section describes how to read data for various MEG manufacturers. .. _import-neuromag: diff --git a/tutorials/io/20_reading_eeg_data.py b/tutorials/io/20_reading_eeg_data.py index 2544e57f60c..c89272fe831 100644 --- a/tutorials/io/20_reading_eeg_data.py +++ b/tutorials/io/20_reading_eeg_data.py @@ -9,6 +9,8 @@ Importing data from EEG devices =============================== +.. tags:: tutorials, io, eeg + MNE includes various functions and utilities for reading EEG data and electrode locations. 
diff --git a/tutorials/io/30_reading_fnirs_data.py b/tutorials/io/30_reading_fnirs_data.py index a40935e9e19..e2c2d8901c9 100644 --- a/tutorials/io/30_reading_fnirs_data.py +++ b/tutorials/io/30_reading_fnirs_data.py @@ -9,6 +9,8 @@ Importing data from fNIRS devices ================================= +.. tags:: tutorials, io, fnirs + fNIRS devices consist of two kinds of optodes: light sources (AKA "emitters" or "transmitters") and light detectors (AKA "receivers"). Channels are defined as source-detector pairs, and channel locations are defined as the midpoint diff --git a/tutorials/io/60_ctf_bst_auditory.py b/tutorials/io/60_ctf_bst_auditory.py index 4c3249996aa..bce3d588a5a 100644 --- a/tutorials/io/60_ctf_bst_auditory.py +++ b/tutorials/io/60_ctf_bst_auditory.py @@ -5,6 +5,8 @@ Working with CTF data: the Brainstorm auditory dataset ====================================================== +.. tags:: tutorials, io, meg + Here we compute the evoked from raw for the auditory Brainstorm tutorial dataset. For comparison, see :footcite:`TadelEtAl2011` and the associated `brainstorm site diff --git a/tutorials/io/70_reading_eyetracking_data.py b/tutorials/io/70_reading_eyetracking_data.py index 15c58bd940c..70d378bc602 100644 --- a/tutorials/io/70_reading_eyetracking_data.py +++ b/tutorials/io/70_reading_eyetracking_data.py @@ -9,6 +9,8 @@ Importing Data from Eyetracking devices ======================================= +.. tags:: tutorials, io + Eyetracking devices record a persons point of gaze, usually in relation to a screen. Typically, gaze position (also referred to as eye or pupil position) and pupil size are recorded as separate channels. 
This section describes how to diff --git a/tutorials/machine-learning/30_strf.py b/tutorials/machine-learning/30_strf.py index eda8f90c41f..34f3ff58875 100644 --- a/tutorials/machine-learning/30_strf.py +++ b/tutorials/machine-learning/30_strf.py @@ -5,6 +5,8 @@ Spectro-temporal receptive field (STRF) estimation on continuous data ===================================================================== +.. tags:: tutorials, machine-learning + This demonstrates how an encoding model can be fit with multiple continuous inputs. In this case, we simulate the model behind a spectro-temporal receptive field (or STRF). First, we create a linear filter that maps patterns in diff --git a/tutorials/machine-learning/50_decoding.py b/tutorials/machine-learning/50_decoding.py index 30f1204598d..cd19412aa77 100644 --- a/tutorials/machine-learning/50_decoding.py +++ b/tutorials/machine-learning/50_decoding.py @@ -9,6 +9,8 @@ Decoding (MVPA) =============== +.. tags:: tutorials, decoding, machine-learning, classification + .. include:: ../../links.inc Design philosophy diff --git a/tutorials/preprocessing/10_preprocessing_overview.py b/tutorials/preprocessing/10_preprocessing_overview.py index b7679579b26..8171db7f1fc 100644 --- a/tutorials/preprocessing/10_preprocessing_overview.py +++ b/tutorials/preprocessing/10_preprocessing_overview.py @@ -5,6 +5,8 @@ Overview of artifact detection ============================== +.. tags:: tutorials, preprocessing, artifacts, beginners + This tutorial covers the basics of artifact detection, and introduces the artifact detection tools available in MNE-Python. diff --git a/tutorials/preprocessing/14_quality_control_report.py b/tutorials/preprocessing/14_quality_control_report.py index 1880ae014b2..052b140b2ab 100644 --- a/tutorials/preprocessing/14_quality_control_report.py +++ b/tutorials/preprocessing/14_quality_control_report.py @@ -5,6 +5,8 @@ Quality control (QC) reports with mne.Report ============================================ +.. 
tags:: tutorials, preprocessing, quality-control, report + Quality control (QC) is the process of systematically inspecting M/EEG data **throughout all stages of an analysis pipeline**, including raw data, intermediate preprocessing steps, and derived results. diff --git a/tutorials/preprocessing/15_handling_bad_channels.py b/tutorials/preprocessing/15_handling_bad_channels.py index 80b2b7315d3..c5291741172 100644 --- a/tutorials/preprocessing/15_handling_bad_channels.py +++ b/tutorials/preprocessing/15_handling_bad_channels.py @@ -5,6 +5,8 @@ Handling bad channels ===================== +.. tags:: tutorials, preprocessing + This tutorial covers manual marking of bad channels and reconstructing bad channels based on good signals at other sensors. diff --git a/tutorials/preprocessing/20_rejecting_bad_data.py b/tutorials/preprocessing/20_rejecting_bad_data.py index 749ed42d2c9..ce92ef6f5f8 100644 --- a/tutorials/preprocessing/20_rejecting_bad_data.py +++ b/tutorials/preprocessing/20_rejecting_bad_data.py @@ -5,6 +5,8 @@ Rejecting bad data spans and breaks =================================== +.. tags:: tutorials, preprocessing, artifacts, rejection, annotations + This tutorial covers: - manual marking of bad spans of data, diff --git a/tutorials/preprocessing/25_background_filtering.py b/tutorials/preprocessing/25_background_filtering.py index 537da6a22b2..86a2f39b53b 100644 --- a/tutorials/preprocessing/25_background_filtering.py +++ b/tutorials/preprocessing/25_background_filtering.py @@ -9,6 +9,8 @@ Background information on filtering =================================== +.. tags:: tutorials, preprocessing + Here we give some background information on filtering in general, and how it is done in MNE-Python in particular. 
Recommended reading for practical applications of digital filter design can be found in diff --git a/tutorials/preprocessing/30_filtering_resampling.py b/tutorials/preprocessing/30_filtering_resampling.py index e4ebccbc28d..35fa323f8fc 100644 --- a/tutorials/preprocessing/30_filtering_resampling.py +++ b/tutorials/preprocessing/30_filtering_resampling.py @@ -5,6 +5,8 @@ Filtering and resampling data ============================= +.. tags:: tutorials, preprocessing + This tutorial covers filtering and resampling, and gives examples of how filtering can be used for artifact repair. diff --git a/tutorials/preprocessing/35_artifact_correction_regression.py b/tutorials/preprocessing/35_artifact_correction_regression.py index 646195ae0a1..faa4a66a38d 100644 --- a/tutorials/preprocessing/35_artifact_correction_regression.py +++ b/tutorials/preprocessing/35_artifact_correction_regression.py @@ -5,6 +5,8 @@ Repairing artifacts with regression =================================== +.. tags:: tutorials, preprocessing, artifacts + This tutorial covers removal of artifacts using regression as in Gratton et al. (1983) :footcite:`GrattonEtAl1983` and Croft & Barry (2000) :footcite:`CroftBarry2000`. diff --git a/tutorials/preprocessing/40_artifact_correction_ica.py b/tutorials/preprocessing/40_artifact_correction_ica.py index 257b1f85051..caa3289315e 100644 --- a/tutorials/preprocessing/40_artifact_correction_ica.py +++ b/tutorials/preprocessing/40_artifact_correction_ica.py @@ -5,6 +5,8 @@ Repairing artifacts with ICA ============================ +.. tags:: tutorials, preprocessing, ica, artifacts + This tutorial covers the basics of independent components analysis (ICA) and shows how ICA can be used for artifact repair; an extended example illustrates repair of ocular and heartbeat artifacts. 
For conceptual background on ICA, see diff --git a/tutorials/preprocessing/45_projectors_background.py b/tutorials/preprocessing/45_projectors_background.py index 3c83d49d8c3..7732c3f0a25 100644 --- a/tutorials/preprocessing/45_projectors_background.py +++ b/tutorials/preprocessing/45_projectors_background.py @@ -5,6 +5,8 @@ Background on projectors and projections ======================================== +.. tags:: tutorials, preprocessing + This tutorial provides background information on projectors and Signal Space Projection (SSP), and covers loading and saving projectors, adding and removing projectors from Raw objects, the difference between "applied" and "unapplied" diff --git a/tutorials/preprocessing/50_artifact_correction_ssp.py b/tutorials/preprocessing/50_artifact_correction_ssp.py index e99a1dab7ff..a3cf9b050cb 100644 --- a/tutorials/preprocessing/50_artifact_correction_ssp.py +++ b/tutorials/preprocessing/50_artifact_correction_ssp.py @@ -5,6 +5,8 @@ Repairing artifacts with SSP ============================ +.. tags:: tutorials, preprocessing, artifacts + This tutorial covers the basics of signal-space projection (SSP) and shows how SSP can be used for artifact repair; extended examples illustrate use of SSP for environmental noise reduction, and for repair of ocular and diff --git a/tutorials/preprocessing/55_setting_eeg_reference.py b/tutorials/preprocessing/55_setting_eeg_reference.py index 770af624de7..57c4a1b31ab 100644 --- a/tutorials/preprocessing/55_setting_eeg_reference.py +++ b/tutorials/preprocessing/55_setting_eeg_reference.py @@ -5,6 +5,8 @@ Setting the EEG reference ========================= +.. tags:: tutorials, preprocessing, eeg + This tutorial describes how to set or change the EEG reference in MNE-Python. 
As usual we'll start by importing the modules we need, loading some diff --git a/tutorials/preprocessing/59_head_positions.py b/tutorials/preprocessing/59_head_positions.py index 4085c4792b6..e91a8f13d91 100644 --- a/tutorials/preprocessing/59_head_positions.py +++ b/tutorials/preprocessing/59_head_positions.py @@ -5,6 +5,8 @@ Extracting and visualizing subject head movement ================================================ +.. tags:: tutorials, preprocessing, meg + Continuous head movement can be encoded during MEG recordings by use of HPI coils that continuously emit sinusoidal signals. These signals can then be extracted from the recording and used to estimate head position as a function diff --git a/tutorials/preprocessing/60_maxwell_filtering_sss.py b/tutorials/preprocessing/60_maxwell_filtering_sss.py index 70d18dcde6a..a8be2051e60 100644 --- a/tutorials/preprocessing/60_maxwell_filtering_sss.py +++ b/tutorials/preprocessing/60_maxwell_filtering_sss.py @@ -5,6 +5,8 @@ Signal-space separation (SSS) and Maxwell filtering =================================================== +.. tags:: tutorials, preprocessing, meg + This tutorial covers reducing environmental noise and compensating for head movement with SSS and Maxwell filtering. diff --git a/tutorials/preprocessing/70_fnirs_processing.py b/tutorials/preprocessing/70_fnirs_processing.py index afafa9c7bdd..526ee19c1fd 100644 --- a/tutorials/preprocessing/70_fnirs_processing.py +++ b/tutorials/preprocessing/70_fnirs_processing.py @@ -5,6 +5,8 @@ Preprocessing functional near-infrared spectroscopy (fNIRS) data ================================================================ +.. 
tags:: tutorials, preprocessing, fnirs, artifacts + This tutorial covers how to convert functional near-infrared spectroscopy (fNIRS) data from raw measurements to relative oxyhaemoglobin (HbO) and deoxyhaemoglobin (HbR) concentration, view the average waveform, and topographic representation of the diff --git a/tutorials/preprocessing/80_opm_processing.py b/tutorials/preprocessing/80_opm_processing.py index ba44a797a51..71e85cce410 100644 --- a/tutorials/preprocessing/80_opm_processing.py +++ b/tutorials/preprocessing/80_opm_processing.py @@ -5,6 +5,8 @@ Preprocessing optically pumped magnetometer (OPM) MEG data ========================================================== +.. tags:: tutorials, preprocessing, meg + This tutorial covers preprocessing steps that are specific to :term:`OPM` MEG data. OPMs use a different sensing technology than traditional :term:`SQUID` MEG systems, which leads to several important differences for diff --git a/tutorials/preprocessing/90_eyetracking_data.py b/tutorials/preprocessing/90_eyetracking_data.py index 00f2d73381b..180bc1470cb 100644 --- a/tutorials/preprocessing/90_eyetracking_data.py +++ b/tutorials/preprocessing/90_eyetracking_data.py @@ -5,6 +5,8 @@ Working with eye tracker data in MNE-Python =========================================== +.. tags:: tutorials, preprocessing, eeg + In this tutorial we will explore simultaneously recorded eye-tracking and EEG data from a pupillary light reflex task. We will combine the eye-tracking and EEG data, and plot the ERP and pupil response to the light flashes (i.e. the pupillary light reflex). diff --git a/tutorials/raw/10_raw_overview.py b/tutorials/raw/10_raw_overview.py index 3a69230f377..fc6a1cc4c0e 100644 --- a/tutorials/raw/10_raw_overview.py +++ b/tutorials/raw/10_raw_overview.py @@ -5,6 +5,8 @@ The Raw data structure: continuous data ======================================= +.. 
tags:: tutorials, raw, io, beginners + This tutorial covers the basics of working with raw EEG/MEG data in Python. It introduces the :class:`~mne.io.Raw` data structure in detail, including how to load, query, subselect, export, and plot data from a :class:`~mne.io.Raw` diff --git a/tutorials/raw/20_event_arrays.py b/tutorials/raw/20_event_arrays.py index b97db396322..bc81cb2fb24 100644 --- a/tutorials/raw/20_event_arrays.py +++ b/tutorials/raw/20_event_arrays.py @@ -5,6 +5,8 @@ Working with events =================== +.. tags:: tutorials, raw, events, annotations + This tutorial describes event representation and how event arrays are used to subselect data. diff --git a/tutorials/raw/30_annotate_raw.py b/tutorials/raw/30_annotate_raw.py index 673917a13a8..25e99b9dc67 100644 --- a/tutorials/raw/30_annotate_raw.py +++ b/tutorials/raw/30_annotate_raw.py @@ -5,6 +5,8 @@ Annotating continuous data ========================== +.. tags:: tutorials, raw, annotations + This tutorial describes adding annotations to a `~mne.io.Raw` object, and how annotations are used in later stages of data processing. diff --git a/tutorials/raw/40_visualize_raw.py b/tutorials/raw/40_visualize_raw.py index 1d08017e63c..479c985277b 100644 --- a/tutorials/raw/40_visualize_raw.py +++ b/tutorials/raw/40_visualize_raw.py @@ -5,6 +5,8 @@ Built-in plotting methods for Raw objects ========================================= +.. tags:: tutorials, raw, visualization, psd, sensor-locations + This tutorial shows how to plot continuous data as a time series, how to plot the spectral density of continuous data, and how to plot the sensor locations and projectors stored in `~mne.io.Raw` objects. 
diff --git a/tutorials/simulation/10_array_objs.py b/tutorials/simulation/10_array_objs.py index ef3123bcf18..45a3a215e41 100644 --- a/tutorials/simulation/10_array_objs.py +++ b/tutorials/simulation/10_array_objs.py @@ -5,6 +5,8 @@ Creating MNE-Python data structures from scratch ================================================ +.. tags:: tutorials, simulation, raw, epochs, evoked + This tutorial shows how to create MNE-Python's core data structures using an existing :class:`NumPy array ` of (real or synthetic) data. diff --git a/tutorials/simulation/70_point_spread.py b/tutorials/simulation/70_point_spread.py index 485714e2c17..5ec94632549 100644 --- a/tutorials/simulation/70_point_spread.py +++ b/tutorials/simulation/70_point_spread.py @@ -5,6 +5,8 @@ Corrupt known signal with point spread ====================================== +.. tags:: tutorials, simulation, inverse, source-localization + The aim of this tutorial is to demonstrate how to put a known signal at a desired location(s) in a :class:`mne.SourceEstimate` and then corrupt the signal with point-spread by applying a forward and inverse solution. diff --git a/tutorials/simulation/80_dics.py b/tutorials/simulation/80_dics.py index aa2dbea48b9..74ca6e59d03 100644 --- a/tutorials/simulation/80_dics.py +++ b/tutorials/simulation/80_dics.py @@ -5,6 +5,8 @@ DICS for power mapping ====================== +.. tags:: tutorials, beamforming, connectivity, simulation + In this tutorial, we'll simulate two signals originating from two locations on the cortex. These signals will be sinusoids, so we'll be looking at oscillatory activity (as opposed to evoked activity). diff --git a/tutorials/stats-sensor-space/10_background_stats.py b/tutorials/stats-sensor-space/10_background_stats.py index d8f08b432a2..4f6c9f2da30 100644 --- a/tutorials/stats-sensor-space/10_background_stats.py +++ b/tutorials/stats-sensor-space/10_background_stats.py @@ -5,6 +5,8 @@ Statistical inference ===================== +.. 
tags:: tutorials, statistics + Here we will briefly cover multiple concepts of inferential statistics in an introductory manner, and demonstrate how to use some MNE statistical functions. """ diff --git a/tutorials/stats-sensor-space/20_erp_stats.py b/tutorials/stats-sensor-space/20_erp_stats.py index b17f16d2618..8dc864f9e1a 100644 --- a/tutorials/stats-sensor-space/20_erp_stats.py +++ b/tutorials/stats-sensor-space/20_erp_stats.py @@ -5,6 +5,8 @@ Visualising statistical significance thresholds on EEG data =========================================================== +.. tags:: tutorials, statistics, eeg + MNE-Python provides a range of tools for statistical hypothesis testing and the visualisation of the results. Here, we show a few options for exploratory and confirmatory tests - e.g., targeted t-tests, cluster-based diff --git a/tutorials/stats-sensor-space/40_cluster_1samp_time_freq.py b/tutorials/stats-sensor-space/40_cluster_1samp_time_freq.py index 60500b2fbee..1d2c4202b26 100644 --- a/tutorials/stats-sensor-space/40_cluster_1samp_time_freq.py +++ b/tutorials/stats-sensor-space/40_cluster_1samp_time_freq.py @@ -5,6 +5,8 @@ Non-parametric 1 sample cluster statistic on single trial power =============================================================== +.. tags:: tutorials, statistics, cluster-permutation, time-frequency + This script shows how to estimate significant clusters in time-frequency power estimates. It uses a non-parametric statistical procedure based on permutations and cluster level statistics. 
diff --git a/tutorials/stats-sensor-space/50_cluster_between_time_freq.py b/tutorials/stats-sensor-space/50_cluster_between_time_freq.py index 0b4078ec883..ee094207492 100644 --- a/tutorials/stats-sensor-space/50_cluster_between_time_freq.py +++ b/tutorials/stats-sensor-space/50_cluster_between_time_freq.py @@ -5,6 +5,8 @@ Non-parametric between conditions cluster statistic on single trial power ========================================================================= +.. tags:: tutorials, statistics, time-frequency, cluster-permutation + This script shows how to compare clusters in time-frequency power estimates between conditions. It uses a non-parametric statistical procedure based on permutations and cluster diff --git a/tutorials/stats-sensor-space/70_cluster_rmANOVA_time_freq.py b/tutorials/stats-sensor-space/70_cluster_rmANOVA_time_freq.py index c8ec0c5f0d5..ab2411f6165 100644 --- a/tutorials/stats-sensor-space/70_cluster_rmANOVA_time_freq.py +++ b/tutorials/stats-sensor-space/70_cluster_rmANOVA_time_freq.py @@ -5,6 +5,8 @@ Mass-univariate twoway repeated measures ANOVA on single trial power ==================================================================== +.. tags:: tutorials, statistics, time-frequency, cluster-permutation + This script shows how to conduct a mass-univariate repeated measures ANOVA. As the model to be fitted assumes two fully crossed factors, we will study the interplay between perceptual modality diff --git a/tutorials/stats-sensor-space/75_cluster_ftest_spatiotemporal.py b/tutorials/stats-sensor-space/75_cluster_ftest_spatiotemporal.py index 6c1e384d37d..7049c375fb8 100644 --- a/tutorials/stats-sensor-space/75_cluster_ftest_spatiotemporal.py +++ b/tutorials/stats-sensor-space/75_cluster_ftest_spatiotemporal.py @@ -5,6 +5,8 @@ Spatiotemporal permutation F-test on full sensor data ===================================================== +.. 
tags:: tutorials, statistics, cluster-permutation + Tests for differential evoked responses in at least one condition using a permutation clustering test. The FieldTrip neighbor templates will be used to determine the adjacency between sensors. This serves as a spatial prior to the clustering. diff --git a/tutorials/stats-source-space/20_cluster_1samp_spatiotemporal.py b/tutorials/stats-source-space/20_cluster_1samp_spatiotemporal.py index 53e90f78d01..4ffe8a316e9 100644 --- a/tutorials/stats-source-space/20_cluster_1samp_spatiotemporal.py +++ b/tutorials/stats-source-space/20_cluster_1samp_spatiotemporal.py @@ -5,6 +5,8 @@ Permutation t-test on source data with spatio-temporal clustering ================================================================= +.. tags:: tutorials, statistics, cluster-permutation, source-localization + This example tests if the evoked response is significantly different between two conditions across subjects. Here just for demonstration purposes we simulate data from multiple subjects using one subject's data. diff --git a/tutorials/stats-source-space/30_cluster_ftest_spatiotemporal.py b/tutorials/stats-source-space/30_cluster_ftest_spatiotemporal.py index fbf7f990bd8..90a610fef9d 100644 --- a/tutorials/stats-source-space/30_cluster_ftest_spatiotemporal.py +++ b/tutorials/stats-source-space/30_cluster_ftest_spatiotemporal.py @@ -5,6 +5,8 @@ 2 samples permutation test on source data with spatio-temporal clustering ========================================================================= +.. tags:: tutorials, statistics, cluster-permutation, source-localization + Tests if the source space data are significantly different between 2 groups of subjects (simulated here using one subject's data). 
The multiple comparisons problem is addressed with a cluster-level diff --git a/tutorials/stats-source-space/60_cluster_rmANOVA_spatiotemporal.py b/tutorials/stats-source-space/60_cluster_rmANOVA_spatiotemporal.py index 6a7ef05123c..973e03bfa1f 100644 --- a/tutorials/stats-source-space/60_cluster_rmANOVA_spatiotemporal.py +++ b/tutorials/stats-source-space/60_cluster_rmANOVA_spatiotemporal.py @@ -5,6 +5,8 @@ Repeated measures ANOVA on source data with spatio-temporal clustering ====================================================================== +.. tags:: tutorials, statistics, cluster-permutation, source-localization + This example illustrates how to make use of the clustering functions for arbitrary, self-defined contrasts beyond standard t-tests. In this case we will tests if the differences in evoked responses between diff --git a/tutorials/time-freq/10_spectrum_class.py b/tutorials/time-freq/10_spectrum_class.py index 0756e76193f..363cdd64a63 100644 --- a/tutorials/time-freq/10_spectrum_class.py +++ b/tutorials/time-freq/10_spectrum_class.py @@ -10,6 +10,8 @@ The Spectrum and EpochsSpectrum classes: frequency-domain data ============================================================== +.. tags:: tutorials, time-frequency, spectrum, psd, epochs + This tutorial shows how to create and visualize frequency-domain representations of your data, starting from continuous :class:`~mne.io.Raw`, discontinuous :class:`~mne.Epochs`, or averaged :class:`~mne.Evoked` data. diff --git a/tutorials/time-freq/20_sensors_time_frequency.py b/tutorials/time-freq/20_sensors_time_frequency.py index 9175e700041..481c0529cc7 100644 --- a/tutorials/time-freq/20_sensors_time_frequency.py +++ b/tutorials/time-freq/20_sensors_time_frequency.py @@ -5,6 +5,8 @@ Frequency and time-frequency sensor analysis ============================================ +.. 
tags:: tutorials, time-frequency, epochs + The objective is to show you how to explore the spectral content of your data (frequency and time-frequency). Here we'll work on Epochs. diff --git a/tutorials/time-freq/50_ssvep.py b/tutorials/time-freq/50_ssvep.py index a625a001d9e..03218f30e2d 100644 --- a/tutorials/time-freq/50_ssvep.py +++ b/tutorials/time-freq/50_ssvep.py @@ -5,6 +5,8 @@ Frequency-tagging: Basic analysis of an SSVEP/vSSR dataset ========================================================== +.. tags:: tutorials, time-frequency + In this tutorial we compute the frequency spectrum and quantify signal-to-noise ratio (SNR) at a target frequency in EEG data recorded during fast periodic visual stimulation (FPVS) at 12 Hz and 15 Hz in different trials. diff --git a/tutorials/visualization/10_publication_figure.py b/tutorials/visualization/10_publication_figure.py index c13ae47b774..8fdb5570f4e 100644 --- a/tutorials/visualization/10_publication_figure.py +++ b/tutorials/visualization/10_publication_figure.py @@ -5,6 +5,8 @@ Make figures more publication ready =================================== +.. tags:: tutorials, visualization, evoked + In this example, we show several use cases to take MNE plots and customize them for a more publication-ready look. """ diff --git a/tutorials/visualization/20_ui_events.py b/tutorials/visualization/20_ui_events.py index 879f04c8b6a..faa68532b10 100644 --- a/tutorials/visualization/20_ui_events.py +++ b/tutorials/visualization/20_ui_events.py @@ -5,6 +5,8 @@ Using the event system to link figures ====================================== +.. tags:: tutorials, visualization, ui-events + Many of MNE-Python's figures are interactive. For example, you can select channels or scroll through time. The event system allows you to link figures together so that interacting with one figure will simultaneously update another figure. 
From d8ae235cdae9fd5ab796a87428793e02b6c7dfeb Mon Sep 17 00:00:00 2001 From: Aniket Singh Yadav Date: Sun, 15 Mar 2026 18:49:09 +0000 Subject: [PATCH 2/4] add sphinx-tags support and tag documentation, tutorials, and examples --- doc/help/faq.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/help/faq.rst b/doc/help/faq.rst index 56b94abc0fc..f2e799686f7 100644 --- a/doc/help/faq.rst +++ b/doc/help/faq.rst @@ -31,7 +31,7 @@ If PyVista plotting in Jupyter Notebooks doesn't work well, using the IPython magic ``%gui qt`` should `help `_. -.. code-block:: pycon +.. code-block:: ipython %gui qt From 203a72300e0d0d85f1903dbed0e59bb7f9d0bf6c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 15 Mar 2026 18:54:47 +0000 Subject: [PATCH 3/4] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8492985492e..3cf78337806 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -322,9 +322,9 @@ ignore_directives = [ "minigallery", "related-software", "rst-class", - "tags", "tab-set", "tabularcolumns", + "tags", "toctree", "towncrier-draft-entries", ] From 5b8f915b93e9f997489db36de7ac4c586848a35b Mon Sep 17 00:00:00 2001 From: Aniket Singh Yadav Date: Sat, 21 Mar 2026 11:38:10 +0000 Subject: [PATCH 4/4] add changelog entry --- doc/changes/dev/13757.other.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/changes/dev/13757.other.rst diff --git a/doc/changes/dev/13757.other.rst b/doc/changes/dev/13757.other.rst new file mode 100644 index 00000000000..f98e4371f96 --- /dev/null +++ b/doc/changes/dev/13757.other.rst @@ -0,0 +1 @@ +Enable sphinx-tags and add tags to docs, tutorials, and examples. by `Aniket Singh Yadav`_. \ No newline at end of file