diff --git a/.github/generate_codeowners.py b/.github/generate_codeowners.py
new file mode 100644
index 000000000..476f26852
--- /dev/null
+++ b/.github/generate_codeowners.py
@@ -0,0 +1,220 @@
+import os
+import re
+import sys
+import time
+import logging
+
+import pandas as pd
+import requests
+
+# Set display options so full DataFrames are printed when debugging
+pd.set_option("display.max_columns", None)
+pd.set_option("display.max_rows", None)
+
+# Configure the logger
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+
+def authenticate_github_session():
+    """
+    Authenticate a GitHub session using a personal access token.
+
+    Returns:
+        A requests.Session object configured with the necessary headers.
+    """
+    gh_session = requests.Session()
+
+    if 'GITHUB_TOKEN' in os.environ:
+        # Use the built-in GITHUB_TOKEN for authentication
+        gh_session.headers.update({'Authorization': f'token {os.environ["GITHUB_TOKEN"]}'})
+    else:
+        logging.error("GITHUB_TOKEN environment variable is not set.")
+        sys.exit(1)
+    return gh_session
+
+
+def get_repository_files(gh_session):
+    """
+    Retrieve the list of files in a GitHub repository.
+
+    Args:
+        gh_session (requests.Session): A GitHub session.
+
+    Returns:
+        list: A list of file metadata.
+    """
+    url = "https://api.github.com/repos/Slicer/ExtensionsIndex/contents"
+    while True:
+        response = gh_session.get(url)
+        if response.status_code == 200:
+            return response.json()
+        # Most likely a rate-limit hit; wait and retry.
+        logging.error("Failed to get repository files. Retrying in 5 minutes...")
+        time.sleep(300)
+
+
+def get_contributors(gh_session, github_repo_api):
+    """
+    Get contributors for a GitHub repository.
+
+    Args:
+        gh_session (requests.Session): A GitHub session.
+        github_repo_api (str): API URL of the GitHub repository.
+
+    Returns:
+        list: A list of contributors.
+    """
+    contributors_url = f"{github_repo_api}/contributors"
+    while True:
+        response = gh_session.get(contributors_url)
+        if response.status_code == 200:
+            return response.json()
+        logging.error(f"Failed to get contributors for {github_repo_api}. Retrying in 5 minutes...")
+        time.sleep(300)
+
+
+def get_pull_requests(gh_session, github_repo_api):
+    """
+    Get closed pull requests for a GitHub repository.
+
+    Args:
+        gh_session (requests.Session): A GitHub session.
+        github_repo_api (str): API URL of the GitHub repository.
+
+    Returns:
+        list: A list of closed pull requests.
+    """
+    pull_requests_url = f"{github_repo_api}/pulls?state=closed"
+    while True:
+        response = gh_session.get(pull_requests_url)
+        if response.status_code == 200:
+            return response.json()
+        logging.error(f"Failed to get pull requests for {github_repo_api}. Retrying in 5 minutes...")
+        time.sleep(300)
+
+
+def get_commit(gh_session, commit_url):
+    """
+    Get commit data from a GitHub commit URL.
+
+    Args:
+        gh_session (requests.Session): A GitHub session.
+        commit_url (str): URL of the GitHub commit.
+
+    Returns:
+        dict: Commit data.
+    """
+    while True:
+        response = gh_session.get(commit_url)
+        if response.status_code == 200:
+            return response.json()
+        logging.error(f"Failed to get commit data from {commit_url}. Status code: {response.status_code}. Retrying in 5 minutes...")
+        time.sleep(300)
+
+
+def determine_point_of_contact(gh_session, extension_name, extension_file_content):
+    """
+    Determine the point of contact (POC) for a GitHub extension file.
+
+    Args:
+        gh_session (requests.Session): A GitHub session.
+        extension_name (str): Name of the extension.
+        extension_file_content (str): Content of the extension file.
+
+    Returns:
+        str: The point of contact (GitHub username) or None if not found.
+    """
+    point_of_contact = None
+    # Find the scmurl line
+    scmurl_line = re.search(r'^scmurl.*$', extension_file_content, re.MULTILINE)
+
+    if scmurl_line is not None:
+        # Get the GitHub repo URL
+        github_repo = scmurl_line.group().split(' ')[1]
+
+        # Replace github.com with api.github.com/repos in the URL
+        github_repo_api = github_repo.replace('github.com', 'api.github.com/repos')
+
+        # Remove .git from the end of the URL if it's present
+        if github_repo_api.endswith('.git'):
+            github_repo_api = github_repo_api[:-4]
+
+        # Skip repositories that are not hosted on GitHub
+        if "github.com" in github_repo:
+            contributors = get_contributors(gh_session, github_repo_api)
+            pull_requests = get_pull_requests(gh_session, github_repo_api)
+
+            if not contributors:
+                logging.info(f"No contributors found for {extension_name}.")
+            elif len(contributors) == 1 or len(pull_requests) == 0:
+                # A single contributor (or no closed pull requests): they are the point of contact.
+                point_of_contact = contributors[0]['login']
+                logging.info("Found number of contributors: " + str(len(contributors)))
+                logging.info("Point of contact: " + point_of_contact)
+            else:
+                # More than one contributor, so determine the point of contact
+                # by whoever accepted the latest closed pull request.
+                logging.info("Found more than one contributor, so determining point of contact by whoever accepted the latest pull request")
+                latest_pull_request = pull_requests[0]
+                if latest_pull_request['merge_commit_sha'] is not None:
+                    merge_commit_sha = latest_pull_request['merge_commit_sha']
+
+                    # Get the merge commit for the latest closed pull request
+                    commit_url = f"{github_repo_api}/commits/{merge_commit_sha}"
+                    commit = get_commit(gh_session, commit_url)
+                    committer = commit['committer']['login'] if commit['committer'] else None
+                    if committer == 'web-flow':
+                        # Merged through the GitHub web UI: credit the commit author instead.
+                        committer = commit['author']['login'] if commit['author'] else None
+                    point_of_contact = committer
+                if point_of_contact is not None:
+                    logging.info("Point of contact: " + point_of_contact)
+                else:
+                    logging.info("Point of contact is not available.")
+
+    return point_of_contact
+
+
+def process_extensions():
+    """
+    Process GitHub extension files, determine the point of contact for each extension, and return the data as a DataFrame.
+
+    Returns:
+        pandas.DataFrame: A DataFrame containing extension data with ExtensionName, PointOfContact, and ExtensionPath.
+    """
+    gh_session = authenticate_github_session()
+    extensions_data = []
+
+    files = get_repository_files(gh_session)
+
+    for file_meta_data in files:
+        if file_meta_data['name'].endswith('.s4ext'):
+            extension_path = file_meta_data['html_url'].split('/blob/main')[1]
+            extension_name = file_meta_data['name'][:-6]
+            logging.info("Processing extension: " + extension_name)
+            # Get the content of the .s4ext file
+            extension_file_content = gh_session.get(file_meta_data['download_url']).text
+            point_of_contact = determine_point_of_contact(gh_session, extension_name, extension_file_content)
+            extensions_data.append({'ExtensionName': extension_name, 'PointOfContact': point_of_contact, 'ExtensionPath': extension_path})
+
+    return pd.DataFrame(extensions_data)
+
+
+def generate_codeowners_file(extension_data, output_file="CODEOWNERS"):
+    """
+    Generate a CODEOWNERS file using the extension data and write it to a specified output file.
+
+    Args:
+        extension_data (pandas.DataFrame): A DataFrame containing extension data.
+        output_file (str): The name of the output CODEOWNERS file.
+    """
+    with open(output_file, 'w') as codeowners_file:
+        for index, row in extension_data.iterrows():
+            point_of_contact = row['PointOfContact']
+            if point_of_contact is not None:
+                codeowners_file.write(f"{row['ExtensionPath']} @{point_of_contact}\n")
+
+
+if __name__ == "__main__":
+    result_df = process_extensions()
+    logging.info('Generating CODEOWNERS file')
+    generate_codeowners_file(result_df, output_file="NEW_CODEOWNERS")
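For reference, the following is a minimal, self-contained sketch of the scmurl handling that determine_point_of_contact() performs, using the same regex and URL rewriting as the script above; the .s4ext content and repository name are invented for illustration:

import re

# Invented sample content; real .s4ext files in this index carry an scmurl line.
sample_s4ext = "scm git\nscmurl https://github.com/ExampleOrg/ExampleExtension.git\nscmrevision main\n"

# Same regex as the script: grab the line starting with 'scmurl'
scmurl_line = re.search(r'^scmurl.*$', sample_s4ext, re.MULTILINE)
github_repo = scmurl_line.group().split(' ')[1]

# Rewrite the browse URL into an API URL and strip a trailing .git
github_repo_api = github_repo.replace('github.com', 'api.github.com/repos')
if github_repo_api.endswith('.git'):
    github_repo_api = github_repo_api[:-4]

print(github_repo_api)  # https://api.github.com/repos/ExampleOrg/ExampleExtension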
diff --git a/.github/notify_extension_managers.py b/.github/notify_extension_managers.py
new file mode 100644
index 000000000..c3af74ad9
--- /dev/null
+++ b/.github/notify_extension_managers.py
@@ -0,0 +1,205 @@
+import os
+import re
+import sys
+import time
+import logging
+
+import pandas as pd
+import requests
+from github import Github
+
+# Configure the logger
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+# Get the GitHub token from the environment variable
+access_token = os.environ.get('GITHUB_TOKEN')
+
+g = Github(access_token)
+repo = g.get_repo("Slicer/ExtensionsIndex")
+
+logging.info("Calling the Slicer CDash API to get the build info for all extensions")
+# API data retrieval and processing
+api_url = "https://slicer.cdash.org/api/v1/index.php?project=SlicerPreview"
+response = requests.get(api_url)
+
+if response.status_code != 200:
+    logging.error(f"Failed to retrieve data. Status code: {response.status_code}")
+    sys.exit(1)
+
+api_result = response.json()
+api_call_time = api_result["datetime"]
+build_groups = api_result["buildgroups"]
+
+api_data = []
+for item in build_groups:
+    for build in item['builds']:
+        build_data = {
+            "id": item["id"],
+            "name": item["name"],
+            "label": build["label"],
+            "APICallTime": api_call_time,
+            "BuildTriggerTime": build["builddate"],
+            "BuildName": build["buildname"],
+            "BuildPlatform": build.get("buildplatform", None),
+            "ConfigureErrors": build.get("configure", {}).get("error", 0),
+            "ConfigureWarnings": build.get("configure", {}).get("warning", 0),
+            "HasCompilationData": build.get("hascompilation", False),
+            "CompilationErrors": build.get("compilation", {}).get("error", 0),
+            "CompilationWarnings": build.get("compilation", {}).get("warning", 0),
+            "HasTestData": build.get("hastest", False),
+            "TestNotRun": build.get("test", {}).get("notrun", 0),
+            "TestFail": build.get("test", {}).get("fail", 0),
+            "TestPass": build.get("test", {}).get("pass", 0),
+        }
+        api_data.append(build_data)
+
+api_df = pd.DataFrame(api_data)
+api_df['ErrorSum'] = api_df['ConfigureErrors'] + api_df['CompilationErrors'] + api_df['TestFail']
+api_df['WarningSum'] = api_df['ConfigureWarnings'] + api_df['CompilationWarnings']
+
+# Read the existing CODEOWNERS file
+with open('CODEOWNERS', 'r') as file:
+    existing_codeowners = [line.strip() for line in file if line.strip()]
+
+# Read the NEW_CODEOWNERS file
+with open('NEW_CODEOWNERS', 'r') as file:
+    new_codeowners = [line.strip() for line in file if line.strip()]
+
+# Extract existing extensions from the CODEOWNERS file
+existing_extensions = [line.split(' ')[0] for line in existing_codeowners]
+
+# Find new extensions present in NEW_CODEOWNERS but not in CODEOWNERS
+new_extensions = [line for line in new_codeowners if line.split(' ')[0] not in existing_extensions]
+
+# Append only the new extensions to the existing CODEOWNERS file
+with open('CODEOWNERS', 'a') as file:
+    for line in new_extensions:
+        file.write(line + '\n')
+
+# Re-read the merged file
+with open('CODEOWNERS', 'r') as file:
+    lines = [line.strip() for line in file if line.strip()]
+
+data = []
+
+pattern = r"/(.*)\.s4ext (@.*)"
+
+for line in lines:
+    match = re.search(pattern, line)
+    if match:
+        extension_name = match.group(1)
+        poc = match.group(2)
+        data.append([extension_name, poc])
+
+codeowners_df = pd.DataFrame(data, columns=['ExtensionName', 'POC'])
+merged_api_codeowners_df = api_df.merge(codeowners_df, left_on='label', right_on='ExtensionName', how='left').reset_index(drop=True).sort_values(by='label')
+
+# Pivot the per-platform columns so each extension becomes a single row
+columns_to_pivot = [
+    'BuildName', 'ConfigureErrors', 'ConfigureWarnings', 'HasCompilationData',
+    'CompilationErrors', 'CompilationWarnings', 'HasTestData', 'TestNotRun',
+    'TestFail', 'TestPass', 'ErrorSum', 'WarningSum'
+]
+
+for column in columns_to_pivot:
+    # fill_value=0 keeps the count columns numeric when a platform build is missing
+    pivot = merged_api_codeowners_df.pivot_table(values=column, index='ExtensionName', columns='BuildPlatform', aggfunc='first', fill_value=0)
+    pivot.columns = [f'{col}_{column}' for col in pivot.columns]
+    merged_api_codeowners_df = pd.merge(merged_api_codeowners_df, pivot, on='ExtensionName')
+
+# Final data cleaning and compilation summary
+merged_api_codeowners_df = merged_api_codeowners_df.drop(columns=columns_to_pivot + ['BuildPlatform'])
+merged_api_codeowners_df = merged_api_codeowners_df.drop_duplicates(subset='ExtensionName')
+merged_api_codeowners_df = merged_api_codeowners_df.reset_index(drop=True)
+
+error_columns_df = merged_api_codeowners_df[['windows_ErrorSum', 'linux_ErrorSum', 'mac_ErrorSum']].apply(pd.to_numeric, errors='coerce')
+warning_columns_df = merged_api_codeowners_df[['windows_WarningSum', 'linux_WarningSum', 'mac_WarningSum']].apply(pd.to_numeric, errors='coerce')
+
+merged_api_codeowners_df['TotalErrors'] = error_columns_df.sum(axis=1)
+merged_api_codeowners_df['TotalWarnings'] = warning_columns_df.sum(axis=1)
+
+# Build the list of issues to open for extensions with errors or warnings
+extensions_with_issues_df = merged_api_codeowners_df[(merged_api_codeowners_df['TotalErrors'] > 0) | (merged_api_codeowners_df['TotalWarnings'] > 0)]
+issues = []
+
+for index, row in extensions_with_issues_df.iterrows():
+    extension_name = row['ExtensionName']
+    header = f"{row['name']} {row['ExtensionName']}"
+    poc = row['POC']
+    errors = row['TotalErrors']
+    warnings = row['TotalWarnings']
+
+    issue_body = f"Hi {poc},\n\nDuring today's build of your extension, I found {errors} error(s) and {warnings} warning(s).\n\nHere's the breakdown by your extension's target platform:\n\n"
+
+    platforms = ['linux', 'mac', 'windows']
+
+    for platform in platforms:
+        platform_build = row[f"{platform}_BuildName"]
+        platform_errors = row[f"{platform}_ErrorSum"]
+        platform_warnings = row[f"{platform}_WarningSum"]
+
+        if platform_errors > 0 or platform_warnings > 0:
+            issue_body += f"- {platform} ({platform_build}):\n"
+            # Error column and matching warning column for each build stage
+            categories = {
+                'Configure': (f"{platform}_ConfigureErrors", f"{platform}_ConfigureWarnings"),
+                'Compilation': (f"{platform}_CompilationErrors", f"{platform}_CompilationWarnings"),
+                'TestFail': (f"{platform}_TestFail", None),
+            }
+
+            for category, (error_col, warning_col) in categories.items():
+                category_errors = row[error_col]
+                category_warnings = row[warning_col] if warning_col else 0
+                if category_errors > 0 or category_warnings > 0:
+                    issue_body += f"  - {category}: {category_errors}"
+                    if category_warnings > 0:
+                        issue_body += f", Warnings({category_warnings})"
+                    issue_body += '\n'
+
+    issues.append({
+        "extension_name": extension_name,
+        "poc": poc,
+        "issue_header": header,
+        "issue_body": issue_body
+    })
+
+extensions_with_issues_list = [issue['extension_name'] for issue in issues]
+
+existing_issues = list(repo.get_issues(state="open"))  # Fetch all open issues once
+
+# Close open issues for extensions that no longer have errors or warnings
+for index, row in merged_api_codeowners_df.iterrows():
+    header = f"{row['name']} {row['ExtensionName']}"
+    extension_name = row['ExtensionName']
+    for issue in existing_issues:
+        if header in issue.title and extension_name not in extensions_with_issues_list:
+            logging.info(f'Closing issue for {extension_name}')
+            # Add a comment before closing
+            issue.create_comment("No errors or warnings found anymore, so closing this issue.")
+            issue.edit(state='closed')
+
+# Open an issue for each extension with problems, unless one is already open
+for issue in issues:
+    header = issue['issue_header']
+    body = issue['issue_body']
+    extension_name = issue['extension_name']
+
+    existing_issue = None
+    for existing in existing_issues:
+        if existing.title == header:
+            existing_issue = existing
+            break
+
+    if existing_issue is None:
+        while True:
+            try:
+                new_issue = repo.create_issue(title=header, body=body)
+                logging.info(f"Issue created for {extension_name} - {new_issue.html_url}")
+                # Pause between issue creations to stay under GitHub's rate limits
+                time.sleep(30)
+                break
+            except Exception as e:
+                logging.error(header)
+                logging.error(body)
+                logging.error(f"Failed to create issue: {e}")
+                time.sleep(30)
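The notifier joins the CDash build results to the CODEOWNERS entries through the pattern r"/(.*)\.s4ext (@.*)". A small sketch of what that pattern extracts, using an entry that appears in the CODEOWNERS file below:

import re

pattern = r"/(.*)\.s4ext (@.*)"
match = re.search(pattern, "/TotalSegmentator.s4ext @lassoan")
print(match.group(1))  # TotalSegmentator, matched against the CDash 'label'
print(match.group(2))  # @lassoan, the owner mentioned in the notification issue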
diff --git a/.github/workflows/notify_code_owners.yaml b/.github/workflows/notify_code_owners.yaml
new file mode 100644
index 000000000..c50220ab0
--- /dev/null
+++ b/.github/workflows/notify_code_owners.yaml
@@ -0,0 +1,30 @@
+name: Extensions Notifier
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 12 * * *"
+
+jobs:
+  notifier:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+
+      - name: Install dependencies
+        run: pip install requests==2.31.0 pandas==2.1.1 PyGithub==2.1.1
+
+      - name: Run Python script to calculate errors and notify
+        run: |
+          python .github/generate_codeowners.py
+          python .github/notify_extension_managers.py
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
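Besides the daily cron run, the workflow_dispatch trigger above allows manual runs. One way to start such a run is through the GitHub Actions REST API; this sketch assumes a token with workflow permissions in GITHUB_TOKEN and that the default branch is main:

import os
import requests

# POST .../actions/workflows/{workflow_file}/dispatches starts a workflow_dispatch run.
response = requests.post(
    "https://api.github.com/repos/Slicer/ExtensionsIndex/actions/workflows/notify_code_owners.yaml/dispatches",
    headers={
        "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
        "Accept": "application/vnd.github+json",
    },
    json={"ref": "main"},
)
response.raise_for_status()  # GitHub replies 204 No Content on success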
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 000000000..fe6156079
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1,172 @@
+/ABLTemporalBoneSegmentation.s4ext @ben-connors
+/AblationPlanner.s4ext @naterex23
+/AirwaySegmentation.s4ext @PietroNardelli
+/AnglePlanesExtension.s4ext @allemangD
+/AnomalousFiltersExtension.s4ext @acsenrafilho
+/ArduinoController.s4ext @pzaffino
+/AstmPhantomTest.s4ext @atracsys-sbt
+/AutomatedDentalTools.s4ext @allemangD
+/BigImage.s4ext @gaoyi
+/BoneReconstructionPlanner.s4ext @mauigna06
+/BoneTextureExtension.s4ext @jcfr
+/BoneThicknessMapping.s4ext @e-simpson
+/BrainVolumeRefinement.s4ext @acsenrafilho
+/BreastImplantAnalyzer.s4ext @lancelevine
+/BreastUltrasoundAnalysis.s4ext @ZhyBrian
+/Breast_DCEMRI_FTV.s4ext @rnadkarni2
+/CMFreg.s4ext @allemangD
+/CarreraSlice.s4ext @ljzhu
+/ChangeTracker.s4ext @fedorov
+/Chest_Imaging_Platform.s4ext @acil-bwh
+/CleverSeg.s4ext @JonathanRamos
+/ColocZStats.s4ext @ChenXiang96
+/CurveMaker.s4ext @tokjun
+/DCMQI.s4ext @fedorov
+/DICOMwebBrowser.s4ext @vanossj
+/DRRGenerator.s4ext @lassoan
+/DSCMRIAnalysis.s4ext @fedorov
+/DatabaseInteractor.s4ext @jcfr
+/DebuggingTools.s4ext @lassoan
+/DeveloperToolsForExtensions.s4ext @jcfr
+/DiffusionQC.s4ext @tashrifbillah
+/EasyClip.s4ext @jcfr
+/ErodeDilateLabel.s4ext @lorensen
+/FiducialsToModelDistance.s4ext @ReynoldsJ20
+/FilmDosimetryAnalysis.s4ext @cpinter
+/GelDosimetryAnalysis.s4ext @cpinter
+/GeodesicSlicer.s4ext @FredericBr
+/GyroGuide.s4ext @pqpqpqpqpq
+/HDBrainExtraction.s4ext @lassoan
+/IDCBrowser.s4ext @fedorov
+/ImageCompare.s4ext @pzaffino
+/ImageMaker.s4ext @finetjul
+/IntensitySegmenter.s4ext @juanprietob
+/KidneyStoneCalculator.s4ext @fredericpanthier
+/LanguagePacks.s4ext @lassoan
+/LungCTAnalyzer.s4ext @rbumm
+/MEMOS.s4ext @smrolfe
+/MHubRunner.s4ext @LennyN95
+/MONAILabel.s4ext @diazandr3s
+/MONAIViz.s4ext @pre-commit-ci[bot]
+/MRUSLandmarking.s4ext @koegl
+/MUST-segmenter.s4ext @kyliekeijzer
+/MarkupsToModel.s4ext @lassoan
+/MatlabBridge.s4ext @lassoan
+/MedialSkeleton.s4ext @che85
+/MeshStatisticsExtension.s4ext @allemangD
+/MeshToLabelMap.s4ext @allemangD
+/ModelClip.s4ext @jamesobutler
+/ModelCropper.s4ext @sebastianandress
+/ModelToModelDistance.s4ext @jcfr
+/NeedleFinder.s4ext @gpernelle
+/OrthodonticAnalysis.s4ext @OrthodonticAnalysis
+/OsteotomyPlanner.s4ext @sjh26
+/PBNRR.s4ext @aangelos28
+/PET-IndiC.s4ext @jcfr
+/PETCPhantom.s4ext @chribaue
+/PETDICOMExtension.s4ext @fedorov
+/PETLiverUptakeMeasurement.s4ext @chribaue
+/PETTumorSegmentation.s4ext @chribaue
+/ParallelProcessing.s4ext @pieper
+/PathReconstruction.s4ext @tavaughan
+/PedicleScrewSimulator.s4ext @jumbojing
+/PercutaneousApproachAnalysis.s4ext @ayamada0614
+/PerkTutor.s4ext @ungi
+/PetSpectAnalysis.s4ext @gti-fing
+/PickAndPaintExtension.s4ext @allemangD
+/Pipelines.s4ext @jcfr
+/PkModeling.s4ext @fedorov
+/PortPlacement.s4ext @giogadi
+/PyTorch.s4ext @lassoan
+/Q3DC.s4ext @allemangD
+/QuantitativeReporting.s4ext @fedorov
+/RVXLiverSegmentation.s4ext @Thibault-Pelletier
+/RVXVesselnessFilters.s4ext @Thibault-Pelletier
+/RawImageGuess.s4ext @lassoan
+/RegistrationQA.s4ext @gsi-kanderle
+/ResectionPlanner.s4ext @lassoan
+/SNRMeasurement.s4ext @tokjun
+/SPHARM-PDM.s4ext @slupok
+/Sandbox.s4ext @lassoan
+/ScatteredTransform.s4ext @grandwork2
+/Scoliosis.s4ext @BenChurch
+/SegmentEditorExtraEffects.s4ext @lassoan
+/SegmentMesher.s4ext @lassoan
+/SegmentRegistration.s4ext @lassoan
+/SegmentationAidedRegistration.s4ext @gaoyi
+/SegmentationReview.s4ext @zapaishchykova
+/SequenceRegistration.s4ext @moselhy
+/ShapePopulationViewer.s4ext @allemangD
+/ShapeRegressionExtension.s4ext @jcfr
+/ShapeVariationAnalyzer.s4ext @jcfr
+/SkeletalRepresentation.s4ext @vicory
+/SkinMouldGenerator.s4ext @lassoan
+/SkullStripper.s4ext @jcfr
+/SlicerAIGT.s4ext @ungi
+/SlicerANTs.s4ext @simonoxen
+/SlicerAutoscoperM.s4ext @jcfr
+/SlicerAutoscroll.s4ext @moselhy
+/SlicerBatchAnonymize.s4ext @hina-shah
+/SlicerBiomech.s4ext @jmhuie
+/SlicerCMF.s4ext @allemangD
+/SlicerCaseIterator.s4ext @JoostJM
+/SlicerCervicalSpine.s4ext @idhamari
+/SlicerDMRI.s4ext @ljod @zhangfanmark
+/SlicerDcm2nii.s4ext @ljod
+/SlicerDensityLungSegmentation.s4ext @pzaffino
+/SlicerDentalModelSeg.s4ext @luciacev
+/SlicerDevelopmentToolbox.s4ext @fedorov
+/SlicerElastix.s4ext @lassoan
+/SlicerFab.s4ext @pieper
+/SlicerFreeSurfer.s4ext @Sunderlandkyl
+/SlicerHeart.s4ext @Sunderlandkyl
+/SlicerIGSIO.s4ext @Sunderlandkyl
+/SlicerIGT.s4ext @Sunderlandkyl
+/SlicerITKUltrasound.s4ext @dzenanz
+/SlicerJupyter.s4ext @jcfr
+/SlicerLayoutButtons.s4ext @che85
+/SlicerLiver.s4ext @RafaelPalomar
+/SlicerLookingGlass.s4ext @jcfr
+/SlicerMarkupConstraints.s4ext @allemangD
+/SlicerMorph.s4ext @muratmaga
+/SlicerNetstim.s4ext @simonoxen
+/SlicerNeuro.s4ext @lassoan
+/SlicerNeuroSegmentation.s4ext @Sunderlandkyl
+/SlicerOpenAnatomy.s4ext @andy9t7
+/SlicerOpenCV.s4ext @jcfr
+/SlicerOpenIGTLink.s4ext @Sunderlandkyl
+/SlicerProstate.s4ext @fedorov
+/SlicerProstateAblation.s4ext @che85
+/SlicerRT.s4ext @cpinter
+/SlicerRadiomics.s4ext @fedorov
+/SlicerRegularizedFastMarching.s4ext @AldrickF
+/SlicerTissueSegmentation.s4ext @MarinaSandonis
+/SlicerToKiwiExporter.s4ext @jcfr
+/SlicerVMTK.s4ext @chir-set
+/SlicerVirtualMouseCursor.s4ext @LucasGandel
+/SlicerVirtualReality.s4ext @jcfr
+/SlicerWMA.s4ext @jcfr
+/SoundControl.s4ext @lassoan
+/Stereotaxia.s4ext @lassoan
+/SurfaceFragmentsRegistration.s4ext @sebastianandress
+/SurfaceMarkup.s4ext @cpinter
+/SurfaceWrapSolidify.s4ext @sebastianandress
+/SwissSkullStripper.s4ext @lorensen
+/T1Mapping.s4ext @stevedaxiao
+/T1_ECVMapping.s4ext @RivettiLuciano
+/T2mapping.s4ext @gattia
+/TCIABrowser.s4ext @kirbyju
+/TITAN.s4ext @sindhurathiru
+/TOMAAT.s4ext @faustomilletari
+/TomoSAM.s4ext @fsemerar
+/TorchIO.s4ext @che85
+/TotalSegmentator.s4ext @lassoan
+/TrackingErrorInspector.s4ext @ungi
+/UKFTractography.s4ext @tashrifbillah
+/VASSTAlgorithms.s4ext @adamrankin
+/VolumeClip.s4ext @lassoan
+/XNATSlicer.s4ext @rherrick
+/ZFrameRegistration.s4ext @leochan2009
+/iGyne.s4ext @gpernelle
+/mpReview.s4ext @deepakri201
+/slicerPRISMRendering.s4ext @cmll2
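The CODEOWNERS file above is flat, one extension per line, so routine checks are easy to script. A small sketch (run from the repository root) that tallies extensions per first-listed owner, assuming the format written by generate_codeowners.py:

from collections import Counter

with open('CODEOWNERS') as f:
    # Each line is '/<Extension>.s4ext @owner [@owner ...]'; count the first owner.
    owners = [line.split()[1] for line in f if line.strip()]

for owner, count in Counter(owners).most_common(5):
    print(f"{owner}: {count} extension(s)")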