@inproceedings{altuntas-vss-2025,
author = {Alper Altuntas and Allison H. Baker and John Baugh and Ganesh Gopalakrishnan and Stephen Siegel},
title = {Specification and Verification for Climate Modeling: Formalization Leading to Impactful Tooling},
booktitle = {Verification of Scientific Software (VSS 2025), a workshop of ETAPS 2025: European Joint Conferences on Theory and Practice of Software},
year = {2025},
month = may,
address = {Hamilton, Ontario, Canada},
url_pdf = {papers/altuntas-vss-2025.pdf},
abstract = {
Earth System Models (ESMs) are critical for understanding past
climates and projecting future scenarios. However, the complexity of
these models, which include large code bases, a wide community of
developers, and diverse computational platforms, poses significant
challenges for software quality assurance. The increasing adoption of
GPUs and heterogeneous architectures further complicates verification
efforts. Traditional verification methods often rely on bitwise
reproducibility, which is not always feasible, particularly under new
compilers or hardware. Manual expert evaluation, on the other hand, is
subjective and time-consuming. Formal methods offer a mathematically
rigorous alternative, yet their application in ESM development has
been limited due to the lack of climate model-specific representations
and tools. Here, we advocate for the broader adoption of formal
methods in climate modeling. In particular, we identify key aspects of
ESMs that are well suited to formal specification and introduce
abstraction approaches for a tailored framework. To demonstrate this
approach, we present a case study using the CIVL model checker to
formally verify a bug fix in an ocean mixing parameterization
scheme. Our goal is to develop accessible, domain-specific formal
tools that enhance model confidence and support more efficient and
reliable ESM development.}
}

@inproceedings{wilson-rtas-2025,
author = {Wilson, Kurt and Al Arafat, Abdullah and Baugh, John and Yu, Ruozhou and Guo, Zhishan},
booktitle = {2025 IEEE 31st Real-Time and Embedded Technology and Applications Symposium (RTAS)},
title = {Physics-Informed Mixed-Criticality Scheduling for {F1Tenth} Cars with Preemptable {ROS 2} Executors},
year = {2025},
pages = {215-227},
keywords = {Mixed-Criticality Systems; Temporal Verification; Formal Methods; F1Tenth cars; UPPAAL},
doi = {10.1109/RTAS65571.2025.00030},
url_pdf = {papers/wilson-rtas-2025.pdf},
abstract = {
Autonomous systems are increasingly used in safety-critical domains,
including industrial automation, autonomous vehicles, and the
industrial Internet of Things. Verifying both the functional and
temporal correctness of these systems is essential to ensure safety
before deployment. However, end-to-end verification is challenging due
to the interaction of continuous-time physical processes with
discrete-time computational systems. Existing formal methods often
assume simplified or static computational models, while traditional
real-time systems focus on meeting timing constraints without
explicitly linking them to physical safety. We address this gap by
proposing a physics-informed mixed-criticality (MC) verification
framework for cyber-physical systems, which allows the integration of
computational and physical models for dynamic, fine-grained safety
assurance. Our framework incorporates feedback from the local
environment to guide criticality-based mode switching, ensuring
adaptive responses to real-time physical states rather than relying on
global worst-case assumptions. We demonstrate the feasibility of our
approach with a prototype implementation on an autonomous F1Tenth
vehicle using preemptive EDF scheduling on ROS 2. Verification is
conducted using UPPAAL to validate system behavior, mode transitions,
and physical safety constraints. Results show that our framework
effectively manages MC requirements, enhancing responsiveness and
safety in dynamic environments.}
}

@inproceedings{wilson-hscc-2025,
author = {Wilson, Kurt and Arafat, Abdullah Al and Baugh, John and Yu, Ruozhou and Liu, Xue and Guo, Zhishan},
title = {Soteria: A Formal Digital-Twin-Enabled Framework for Safety-Assurance of Latency-Aware Cyber-Physical Systems},
booktitle = {Proceedings of the 28th ACM International Conference on Hybrid Systems: Computation and Control},
year = {2025},
isbn = {9798400715044},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/3716863.3718028},
doi = {10.1145/3716863.3718028},
articleno = {22},
numpages = {11},
keywords = {Formal Methods, Hybrid Systems and Models, Real-Time Cyber-Physical Systems, Temporal Verification},
location = {Irvine, CA, USA},
series = {HSCC '25},
url_pdf = {papers/wilson-hscc-2025.pdf},
abstract = {
Verifying the safety of latency-aware cyber-physical systems is both
critical and challenging due to the interaction between continuous
physical dynamics and discrete computational constraints. This paper
introduces SOTERIA, a formal framework that integrates digital twins
for ensuring safety in these systems. SOTERIA models both the physical
dynamics and computational behavior, enabling integrated verification
within a specific operating environment. This approach goes beyond
conventional methods that either treat physical and computational
aspects separately or rely on overly conservative worst-case
analyses. By modeling hybrid dynamics alongside computational models
and operating environments, SOTERIA verifies both functional and
timing correctness. Leveraging established verification tools, SOTERIA
determines whether end-to-end latencies meet formal specifications,
bridging the gap between computational and physical requirements. We
first introduce a simple example of a 1D adaptive cruise control
system to illustrate its effectiveness. We then present findings from
a case study using the F1Tenth racing car platform and the UPPAAL tool
to demonstrate SOTERIA's effectiveness in realistic scenarios,
enabling safety verification that was previously infeasible with
conventional schedulability analyses. This work underscores the
importance of an integrated verification approach for enhancing safety
and reliability in autonomous systems.}
}

@inproceedings{wilson-memocode-2024,
title = {Physics-Aware Mixed-Criticality Systems Design via End-to-End Verification of {CPS}},
author = {Kurt Wilson and Abdullah Al Arafat and John Baugh and Ruozhou Yu and Zhishan Guo},
booktitle = {22nd ACM-IEEE International Symposium on Formal Methods and Models for System Design (MEMOCODE)},
year = {2024},
publisher = {ACM/IEEE},
pages = {96-100},
doi = {10.1109/MEMOCODE63347.2024.00016},
url_pdf = {papers/wilson-memocode-2024.pdf},
abstract = {
Autonomous systems are heavily used in many safety-critical domains,
such as industrial automation, autonomous cars, and the Industrial
Internet of Things (I-IoT). Verification of the functional and
temporal correctness of such systems is necessary before deployment to
ensure their safety. However, due to the presence of physical systems
in the continuous-time domain and computational models in the
discrete-time domain, end-to-end verification of these systems is
highly challenging. Existing formal methods focus on verifying
physical models assuming static or simplified computation models. In
contrast, existing real-time systems focus on satisfying strict timing
bounds but do not address how those bounds are obtained and how they
relate to physical safety. Our approach bridges these two domains and
constitutes an end-to-end verification framework for arbitrary
physical models and computational models incorporated within a
cyber-physical automated system. By allowing interaction between the
computational and physical models, our verification framework enables
a fine-grained scheme that verifies against the local environment
instead of verifying against global worst-case assumptions. Moreover,
to support locally varying worst-case scenarios, a mixed-criticality
system is proposed in which the system supports several criticality
modes and switches among them based on environmental
uncertainty. Finally, a proof-of-concept evaluation of the proposed
framework is reported.}
}

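As a rough point of reference for the three mixed-criticality scheduling entries above, the sketch below shows the classical baseline they go beyond: a Liu-Layland utilization test for preemptive EDF with implicit deadlines, plus a naive per-mode check for a two-mode (LO/HI) task set. This is not the papers' physics-informed framework, and the task parameters are hypothetical.

# Illustrative only: a classical EDF utilization test and a toy two-mode
# mixed-criticality check; it ignores the mode-switch transient that the
# papers' analyses address, and all task parameters are hypothetical.

def edf_schedulable(tasks):
    """Liu & Layland test for preemptive EDF with implicit deadlines.

    tasks: list of (wcet, period) pairs; schedulable iff sum(C/T) <= 1.
    """
    return sum(c / t for c, t in tasks) <= 1.0

def mc_schedulable(tasks):
    """Naive per-mode check for a two-mode (LO/HI) mixed-criticality set.

    tasks: list of (c_lo, c_hi, period, level) with level in {"LO", "HI"}.
    LO mode runs every task at its LO budget; HI mode drops LO tasks and
    runs HI tasks at their HI budget.
    """
    lo_mode = [(c_lo, t) for c_lo, _, t, _ in tasks]
    hi_mode = [(c_hi, t) for _, c_hi, t, lvl in tasks if lvl == "HI"]
    return edf_schedulable(lo_mode) and edf_schedulable(hi_mode)

if __name__ == "__main__":
    # Hypothetical task set: (C_LO, C_HI, T, level)
    tasks = [(1, 2, 10, "HI"), (2, 2, 20, "LO"), (3, 6, 30, "HI")]
    print(mc_schedulable(tasks))  # True for this toy example
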
@inproceedings{banach-icgt-2024,
title = {The `Causality' Quagmire for Formalised Bond Graphs},
author = {Banach, Richard and Baugh, John},
booktitle = {Graph Transformation},
editor = {Harmer, Russ and Kosiol, Jens},
year = {2024},
publisher = {Springer Nature Switzerland},
pages = {99-117},
url_pdf = {papers/banach-icgt-2024.pdf},
abstract = {
Bond graphs represent the structure and functionality of mechatronic
systems from a power flow perspective. Unfortunately, presentations of
bond graphs are replete with ambiguity, significantly impeding
understanding. We extend the formalisation in preceding work to
address the phenomenon of `causality', intended to help formulate
solution strategies for bond graphs, but usually presented in such
vague terms that the claims made are easily shown to be false. We show
that `causality' only works as advertised in the simplest cases, where
it mimics the mathematical definition of bond graph
semantics. Counterexamples severely limit the applicability of the
notion.}
}

@inproceedings{altuntas-iss-2023,
title = {Verifying {ParamGen}: A case study in scientific software abstraction and modeling},
author = {Altuntas, Alper and Baugh, John and Nusbaumer, Jesse},
booktitle = {Proceedings of the 2023 Improving Scientific Software Conference},
note = {{(No.~NCAR/TN-576+PROC)}},
year = {2023},
pages = {1-9},
url_pdf = {papers/altuntas-iss-2023.pdf},
doi = {10.5065/j0e4-ss70},
abstract = {
We introduce ParamGen, an infrastructure library for climate models to
generate input files that specify physics, parameterizations, and
other behavior. ParamGen supports arbitrary Python expressions for
specifying a default set of runtime parameters and values, thereby
providing a high level of expressiveness and flexibility. Initially
developed for the MOM6 ocean component in the Community Earth System
Model (CESM), ParamGen is now used by several additional CESM
components. It is therefore important that it operates correctly,
i.e., absent any undesired and unexpected behavior. To that end, we
develop an abstract verification model of ParamGen in Alloy, a
software modeling and analysis tool with a declarative language that
combines first-order logic and relational calculus. We evaluate the
correctness of ParamGen via Alloy and discuss how abstract models and
formal verification can help quickly frame questions and get answers
regarding the structure and behavior of our scientific computing
applications. We also describe our experience in arriving at a
cleaner, well-formed software design and abstractions as a result of
the modeling exercise presented in this study.}
}

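The abstract above notes that ParamGen selects runtime parameter defaults by evaluating arbitrary Python expressions against a model configuration. The sketch below illustrates only that general idea; the schema, parameter names, and guard syntax are hypothetical and are not ParamGen's actual API.

# Minimal sketch of guarded, expression-driven defaults; hypothetical names
# and syntax, not ParamGen itself.

config = {"ocean_grid": "tx0.66v1", "compset": "C"}

# Each parameter maps guard expressions to candidate defaults; the first
# guard that evaluates to True (in order) wins, with "else" as a fallback.
defaults = {
    "DT_THERM": {
        '${ocean_grid} == "tx0.66v1"': 3600.0,
        "else": 1800.0,
    },
}

def resolve(param, cfg):
    for guard, value in defaults[param].items():
        if guard == "else":
            return value
        # Expand ${var} placeholders, then evaluate as a Python expression.
        expr = guard
        for key, val in cfg.items():
            expr = expr.replace("${" + key + "}", repr(val))
        if eval(expr, {"__builtins__": {}}):
            return value
    raise KeyError(f"no default matched for {param}")

print(resolve("DT_THERM", config))  # 3600.0

The verification question the paper asks in Alloy is, roughly, whether such a resolution procedure is well defined for every reachable configuration, something a small model like this makes easy to frame.
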
@inproceedings{banach-icgt-2023,
title = {Formalisation, abstraction and refinement of bond graphs},
author = {Banach, Richard and Baugh, John},
booktitle = {Graph Transformation},
editor = {Fern{\'a}ndez, Maribel and Poskitt, Christopher M.},
OPTbooktitle = {16th International Conference on Graph Transformation (ICGT)},
OPTnote = {{ICGT 2023}, {Leicester, UK}, {EATCS and IFIP WG 1.3}},
pages = {145-162},
year = {2023},
doi = {10.1007/978-3-031-36709-0_8},
publisher = {Springer Nature Switzerland},
url_pdf = {papers/banach-icgt-2023.pdf},
abstract = {
Bond graphs represent the structure and functionality of mechatronic
systems from a power flow perspective. Unfortunately, presentations
of bond graphs are replete with ambiguity, significantly impeding
understanding. A formalisation of the essentials of bond graphs is
given, together with a formalisation of bond graph transformation,
which can directly express abstraction and refinement of bond graphs.}
}

@article{zhang-jss-2023,
author = {Zhang, Miaomiao and Teng, Yu and Kong, Hui and Baugh, John and Su, Yu and Mi, Junri and Du, Bowen},
title = {Automatic modelling and verification of {Autosar} architectures},
journal = {Journal of Systems and Software},
volume = {201},
pages = {111675},
year = {2023},
doi = {10.1016/j.jss.2023.111675},
publisher = {Elsevier},
url_pdf = {papers/zhang-jss-2023.pdf},
keywords = {Vehicle electronic system, Autosar, formal modelling, timed
 automata, verification},
abstract = {
Autosar (AUTomotive Open System ARchitecture) is a development
partnership whose primary goal is the standardization of basic system
functions and functional interfaces for electronic control units in
automobiles. As an open specification, its layered software
architecture promotes the interoperability of real-time embedded
vehicle systems and components. It also opens up the possibility of
formal modelling and verification approaches, centred around the
specification, that can be used to support analysis in the early
stages of design. In this paper, we describe a methodology and
associated tool, called A2A, that automatically models systems defined
by the Autosar specifications as timed automata, and then verifies
their timing properties using Uppaal. It contains 22 groups of timed
automata templates, together with two auxiliary test templates, that
model the Autosar architecture and timing properties, allowing
time-related behaviours to be extracted from the three-layer
architecture, i.e., the Autosar Software, Autosar Runtime Environment,
and Basic Software layers, and templates to be automatically
instantiated. The timing properties are specified using timed
computation tree logic (TCTL) in Uppaal to verify the system model. We
demonstrate the capabilities of the methodology by applying it to an
Autosar architecture that describes an internal vehicle light control
system, thereby showing its effectiveness.}
}

@inproceedings{benavides-lcpc-2023,
author = {Juan Benavides and John Baugh and Ganesh Gopalakrishnan},
title = {An {HPC} practitioner's workbench for formal refinement checking},
editor = {Mendis, C. and Rauchwerger, L.},
booktitle = {Languages and Compilers for Parallel Computing, LCPC 2022},
note = {Lecture Notes in Computer Science, vol. 13829},
OPTbooktitle = {35th International Workshop on Languages and Compilers for
 Parallel Computing ({LCPC})},
year = {2023},
doi = {10.1007/978-3-031-31445-2_5},
OPTlocation = {Chicago, IL},
pages = {64-72},
publisher = {Springer, Cham},
url_pdf = {papers/benavides-lcpc-2023.pdf},
keywords = {Formal methods, scientific computing, parallelism, Alloy},
isbn = {978-3-031-31445-2},
abstract = {
HPC practitioners make use of techniques, such as parallelism and
sparse data structures, that are difficult to reason about and debug.
Here we explore the role of data refinement, a correct-by-construction
approach, in verifying HPC applications via bounded model checking.
We show how single program, multiple data (SPMD) parallelism can be
modeled in Alloy, a declarative specification language, and describe
common issues that arise when performing scope-complete refinement
checks in this context.}
}

@inproceedings{karrenberg-wdsa-2022,
author = {Cade Karrenberg and Juan Benavides and Emily Berglund and
 Eunsuk Kang and John Baugh},
title = {Identifying cyber-physical vulnerabilities of water distribution
 systems using finite state processes},
booktitle = {2nd International Joint Conference on Water Distribution
 System Analysis \& Computing and Control in the Water Industry,
 {WDSA CCWI 2022}},
year = {2022},
location = {Universitat Politècnica de València, Valencia, Spain},
number = {14838},
OPTdoi = {not-posted-yet},
url_pdf = {papers/karrenberg-wdsa-2022.pdf},
OPTpublisher = {not-sure},
keywords = {cyber-security, formal methods, water distribution system,
 cyberphysical system, finite state processes},
abstract = {
Modern water distribution systems (WDS) comprise not only physical
infrastructure, but also smart meters, sensors, automated control
systems, and wireless communication links that manage hydraulic
processes and water quality. Networked devices create new
vulnerabilities to cyber-attacks, in which an attacker infiltrates
connected devices through internet and network connections, with
potentially severe consequences such as creating water supply
interruptions and compromising water quality. Attention to
cyber-attacks comes at a time of notable intrusions into the computer
systems that monitor and control infrastructure, including the recent
attack on a water treatment plant in Oldsmar, Florida. This paper
describes an approach for probing a system's vulnerabilities and
finding attack scenarios that may cause a disruption, for example, by
lowering the pressure in water mains and exposing a system to harmful
contaminants. Our modelling framework uses a process calculus called
Finite State Processes (FSP), a formal notation with a supporting
tool, the Labelled Transition System Analyzer (LTSA), for
automatically checking safety and other desirable properties of
computer systems. Applying formal methods tools, most often used in
the design and testing of hardware and software systems, to WDSs
allows us to augment traditional simulation approaches with the
exhaustive model-checking capabilities of tools such as LTSA. This
framework couples FSP with epanetCPA, an open-source toolbox that can
simulate attacks on a system's computer and network components to
evaluate the resulting hydraulic response. Within this framework, we
treat WDSs as a bounded state control problem to ensure, for instance,
that water levels of an elevated storage tank are always within an
acceptable range. Attacker capabilities are broadly defined and
modelled to allow communication links to be compromised through
eavesdropping and packet injection attacks. Feasible attack scenarios
are automatically identified and produced by LTSA, which generates
counterexamples to a safety property. To incorporate the physics of
WDSs into FSP, we discretize and quantize systems and calibrate
observable behaviors using Python scripts, including the package Water
Network Tool for Resilience (WNTR). Attack scenarios are simulated
with epanetCPA for purposes of validation.}
}

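The abstract above mentions discretizing and quantizing continuous system behavior so it can be expressed in a finite-state notation like FSP. The sketch below shows only that quantization step in isolation: mapping a tank-level time series onto a small set of discrete states and collapsing it to an event trace. The thresholds and levels are hypothetical, and this is not the paper's tooling.

# Minimal sketch of discretization for a finite-state model; thresholds and
# data are hypothetical.

def quantize(level, low=2.0, high=6.0):
    """Map a tank level (m) to a discrete state label."""
    if level < low:
        return "LOW"       # unsafe: pressure in the mains may drop
    if level > high:
        return "HIGH"      # unsafe: overflow risk
    return "OK"

def to_trace(levels):
    """Collapse a sampled time series into an FSP-style event trace,
    keeping only state changes."""
    trace = []
    for level in levels:
        state = quantize(level)
        if not trace or trace[-1] != state:
            trace.append(state)
    return trace

levels = [4.8, 4.1, 3.2, 1.9, 1.7, 2.4, 5.1, 6.3]
print(to_trace(levels))  # ['OK', 'LOW', 'OK', 'HIGH']
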
@article{kwag-engstruct-2021,
title = {Significance of multi-hazard risk in design of buildings
 under earthquake and wind loads},
author = {Shinyoung Kwag and Abhinav Gupta and John Baugh and Hyun-Su Kim},
journal = {Engineering Structures},
volume = {243},
pages = {112623},
year = {2021},
OPTissn = {xxx},
doi = {10.1016/j.engstruct.2021.112623},
OPTurl = {https://www-sciencedirect-com.prox.lib.ncsu.edu/science/article/pii/S0141029621007732},
url_pdf = {papers/kwag-engstruct-2021.pdf},
keywords = {Earthquake and wind hazards; Performance-based design;
 Risk-based multi-hazard approach; Multi-hazard risk map; Multi-hazard
 scenario; Magneto-rheological damper; Adjacent buildings},
abstract = {
Traditionally, external hazards are considered in the design of a
building through the various combinations of loads prescribed in
relevant design codes and standards. It is often the case that the
design is governed by a single dominant hazard at a given geographic
location. This is particularly true for earthquake and wind hazards,
both of which impart time-dependent dynamic loads on the
structure. Engineers may nevertheless wonder if a building designed
for one of the two dominant hazards will satisfactorily withstand the
other. Prior studies have indicated that in some cases, when a
building is designed for a single dominant hazard, it does not
necessarily provide satisfactory performance against the other
hazard. In this paper, we propose a novel framework that builds upon
performance-based design requirements and determines whether the
design of a building is governed primarily by a single hazard or
multiple hazards. It integrates site-dependent hazard characteristics
with the performance criteria for a given building type and building
geometry. The framework is consistent with the burgeoning area of
probabilistic risk assessment, and yet can easily be extended to
traditional, deterministically characterized design requirements as
illustrated herein.}
}

@inproceedings{dyer-abz-2021,
title = {Sterling: A web-based visualizer for relational modeling languages},
author = {Dyer, Tristan and Baugh, John},
booktitle = {Rigorous State Based Methods. ABZ 2021},
note = {Lecture Notes in Computer Science 12709},
pages = {99-104},
address = {Cham},
year = {2021},
publisher = {Springer},
OPTisbn = {978-3-030-77543-8},
doi = {10.1007/978-3-030-77543-8_7},
url_pdf = {papers/dyer-abz-2021.pdf},
abstract = {
We introduce Sterling, a web-based visualization tool that provides
interactive views of relational models and allows users to create
custom visualizations using modern JavaScript libraries like D3 and
Cytoscape. We outline its design goals and architecture, and describe
custom visualizations developed with Sterling that enable verification
studies of scientific software used in production. While development
is driven primarily by the Alloy community, other relational modeling
languages are accommodated by Sterling's data agnostic architecture.}
}

@inproceedings{curri-hicss-21,
author = {Curri, Danielle and Aziz, Tarek and Baugh, John and Johnson,
 Jeremiah},
title = {Industrial Symbiosis Waste Exchange Identification and
 Optimization},
booktitle = {Proceedings of the 54th Hawaii International Conference on
 System Sciences},
year = {2021},
pages = {916-925},
url = {http://hdl.handle.net/10125/70724},
url_pdf = {papers/curri-hicss-21.pdf},
abstract = {
Industrial symbiosis is the concept that waste from industrial
processes can be diverted and then reused as inputs into co-located
industrial entities. While research to date has identified successful
examples of industrial symbiosis and characterized formation
processes, little is known about how new eco-industrial parks can be
designed and their performance optimized. In this paper, we describe
how industrial symbiosis can be modeled and optimized during the
development phase to assist in the creation of eco-industrial
parks. We present a database framework, waste exchange identification
algorithm, and Python-based optimization system that generates a
mixed-integer linear programming model to minimize the amount of
non-recycled waste produced. We illustrate the functionality of the
approach on three test cases that demonstrate increasing levels of
complexity. The optimization model can also accommodate multiple
objectives, allowing further exploration of the benefits of industrial
symbiosis at the design stage.}
}

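The entry above describes generating a mixed-integer linear program that minimizes non-recycled waste. The toy model below shows only the shape of such a formulation, written with the PuLP library; the facilities, stream, and capacities are hypothetical, and this is not the paper's actual system.

# Toy waste-exchange model: facility A produces a waste stream that B and C
# can accept up to a capacity; minimize what ends up landfilled.  All data
# here are hypothetical.
from pulp import LpProblem, LpVariable, LpMinimize, lpSum, value

supply = 10.0                       # t/day produced by facility A
capacity = {"B": 6.0, "C": 3.0}     # t/day each receiver can accept

prob = LpProblem("waste_exchange", LpMinimize)
ship = {k: LpVariable(f"ship_to_{k}", lowBound=0, upBound=cap)
        for k, cap in capacity.items()}
landfill = LpVariable("landfill", lowBound=0)

# Mass balance: all waste is either exchanged or landfilled.
prob += lpSum(ship.values()) + landfill == supply
# Objective: minimize the non-recycled (landfilled) amount.
prob += 1.0 * landfill

prob.solve()
print(value(landfill))  # 1.0 t/day left over in this toy instance

A real exchange network adds binary variables for whether a link is established and compatibility constraints between waste and input streams, which is what makes the model mixed-integer.
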
@incollection{banach-festschrift-2020,
author = {Banach, Richard and Baugh, John},
editor = {Adamatzky, Andrew and Kendon, Vivien},
title = {A simple {Hybrid Event-B} model of an active control system for
 earthquake protection},
booktitle = {From Astrophysics to Unconventional Computation},
year = {2020},
publisher = {Springer},
address = {Cham},
series = {Emergence, Complexity and Computation},
volume = {35},
pages = {157-194},
isbn = {978-3-030-15792-0},
doi = {10.1007/978-3-030-15792-0_7},
OPTurl = {https://doi.org/10.1007/978-3-030-15792-0_7},
url_pdf = {papers/banach-festschrift-2020.pdf},
abstract = {
In earthquake-prone zones of the world, severe damage to buildings and
life-endangering harm to people pose a major risk when severe
earthquakes happen. In recent decades, active and passive measures to
prevent building damage have been designed and deployed. A simple
model of an active damage prevention system, founded on earlier work,
is investigated from a model-based formal development perspective,
using Hybrid Event-B. The non-trivial physical behaviour in the model
is readily captured within the formalism. However, when the usual
approximation and discretization techniques from engineering and
applied mathematics are used, the rather brittle refinement techniques
used in model-based formal development start to break down. Despite
this, the model developed stands up well when compared via simulation
with a standard approach. The requirements of a richer formal
development framework, better able to cope with applications
exhibiting non-trivial physical elements, are discussed.}
}

@inproceedings{dyer-correctness-2019,
author = {Dyer, Tristan and Altuntas, Alper and Baugh, John},
title = {Bounded verification of sparse matrix computations},
booktitle = {Proceedings of the Third International Workshop on
 Software Correctness for {HPC} Applications, {Correctness'19}},
year = {2019},
isbn = {978-1-7281-6015-3},
location = {Denver, CO, USA},
pages = {36-43},
doi = {10.1109/Correctness49594.2019.00010},
url_pdf = {papers/dyer-correctness-2019.pdf},
publisher = {IEEE/ACM},
keywords = {sparse matrix formats, state-based formal methods, mechanical
 verification},
abstract = {
We show how to model and reason about the structure and behavior of
sparse matrices, which are central to many applications in scientific
computation. Our approach is state-based, relying on a formalism
called Alloy to show that one model is a refinement of another. We
present examples of sparse matrix-vector multiplication, transpose,
and translation between formats using ELLPACK and compressed sparse
row formats to demonstrate the approach. To model matrix computations
in a declarative language like Alloy, a new idiom is presented for
bounded iteration with incremental updates. Mechanical verification
is performed using SAT solvers built into the tool.}
}

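In the spirit of this entry and the refinement-checking theme of benavides-lcpc-2023 above, the sketch below checks a compressed sparse row (CSR) matrix-vector product against a dense reference over a small, exhaustively enumerated scope. The papers perform this kind of bounded refinement check in Alloy with a SAT solver; this is only a pure-Python illustration of the idea, with the scope chosen arbitrarily.

# CSR matrix-vector product vs. a dense reference, checked over a bounded
# scope of small matrices and vectors.
from itertools import product

def to_csr(dense):
    """Convert a dense matrix (list of rows) to CSR arrays."""
    values, col_idx, row_ptr = [], [], [0]
    for row in dense:
        for j, a in enumerate(row):
            if a != 0:
                values.append(a)
                col_idx.append(j)
        row_ptr.append(len(values))
    return values, col_idx, row_ptr

def csr_matvec(values, col_idx, row_ptr, x):
    """y = A x using the CSR representation."""
    y = []
    for i in range(len(row_ptr) - 1):
        acc = 0
        for k in range(row_ptr[i], row_ptr[i + 1]):
            acc += values[k] * x[col_idx[k]]
        y.append(acc)
    return y

def dense_matvec(dense, x):
    return [sum(a * b for a, b in zip(row, x)) for row in dense]

# Bounded "scope": all 2x2 matrices and vectors with entries in {-1, 0, 1}.
entries = (-1, 0, 1)
for flat in product(entries, repeat=4):
    dense = [list(flat[:2]), list(flat[2:])]
    for x in product(entries, repeat=2):
        assert csr_matvec(*to_csr(dense), list(x)) == dense_matvec(dense, list(x))
print("CSR agrees with the dense specification over this scope")
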
@inproceedings{altuntas-correctness-2018,
author = {Altuntas, Alper and Baugh, John},
title = {Hybrid theorem proving as a lightweight method for verifying
 numerical software},
booktitle = {Proceedings of the Second International Workshop on
 Software Correctness for {HPC} Applications, {Correctness'18}},
year = {2018},
isbn = {978-1-7281-0226-9},
location = {Dallas, TX, USA},
pages = {1-8},
doi = {10.1109/Correctness.2018.00005},
url_pdf = {papers/altuntas-correctness-2018.pdf},
publisher = {IEEE},
keywords = {hybrid systems, formal methods, scientific computation,
 KeYmaera X},
abstract = {
Large-scale numerical software requires substantial computer resources
that complicate testing and debugging. A single run of a climate model
may require many millions of core-hours and terabytes of disk space,
making trial-and-error experiments burdensome and time consuming. In
this study, we apply hybrid theorem proving from the field of
cyber-physical systems to problems in scientific computation, and show
how to verify the correctness of discrete updates that appear in the
simulation of continuous physical systems. By viewing numerical
software as a hybrid system that combines discrete and continuous
behavior, test coverage and confidence in findings can be
increased. We describe abstraction approaches for modeling numerical
software and demonstrate the applicability of the approach in a case
study that reproduces undesirable behavior encountered in a
parameterization scheme, called the K-profile parameterization, widely
used in ocean components of large-scale climate models. We then
identify and model a fix in the configuration of the scheme, and
verify that the undesired behavior is eliminated for all possible
execution sequences. We conclude that hybrid theorem proving is an
effective and efficient approach that can be used to verify and reason
about properties of large-scale numerical software.}
}

@article{liu-ecs-2018,
title = {Numerical study on factors influencing typhoon-induced storm
 surge distribution in {Zhanjiang Harbor}},
author = {Xing Liu and Wensheng Jiang and Bo Yang and John Baugh},
journal = {Estuarine, Coastal and Shelf Science},
volume = {215},
pages = {39-51},
year = {2018},
issn = {0272-7714},
doi = {10.1016/j.ecss.2018.09.019},
OPTurl = {http://www.sciencedirect.com/science/article/pii/S027277141830009X},
url_pdf = {papers/liu-ecs-2018.pdf},
keywords = {Local atmospheric effect, Remote atmospheric effect, Storm
 surge, Spatial distribution, Sea level rise, Typhoon track},
abstract = {
A 2-D unstructured finite element model is used to study how local and
remote atmospheric forcing, sea level rise, and shoreline variation
affect typhoon-induced storm surge in a small shallow bay, Zhanjiang
Harbor (ZH). In this research, the spatial distribution of storm surge
in ZH is divided into three patterns, denoted E-W, N-S, and S-N, using
a quantitative method. In the bay, local atmospheric effects (LAE) and
remote atmospheric effects (RAE) both play important roles in the
maximum residual water level. The contribution of RAE to the inflow is
higher than that of the LAE, but the former is less important to the
spatial distribution in ZH. In addition, the typhoon track influences
the time of occurrence of the maximum surge by forcing the outer
waters toward ZH; the spatial distribution of the surge residual in
the bay is then controlled by local winds, and different regions are
threatened during different kinds of storm surge processes. Two sea
level rise scenarios are also set up in the paper, and the results
show that the trends of the changes in LAE and RAE in the inner bay
are opposite under sea level rise; however, the total changes in the
distribution are not the same across categories. In general, the E-W
category storm surge is weakened, while the N-S and S-N category storm
surges show inverse changes in the north and south of ZH. There is a
downward trend in the maximum surge gradient within the bay, but
relative to sea level rise itself this effect is not pronounced. The
establishment of the sea embankment increased the storm surge within
the bay, though not significantly.}
}

@inproceedings{baugh-abz-2018,
title = {State-based formal methods in scientific computation},
author = {Baugh, John and Dyer, Tristan},
OPTeditor = {Butler, Michael and Raschke, Alexander and Hoang, Thai Son
 and Reichl, Klaus},
booktitle = {Abstract State Machines, Alloy, B, TLA, VDM, and Z: 6th
 International Conference, ABZ 2018},
note = {Lecture Notes in Computer Science 10817},
pages = {392-396},
address = {Cham},
year = {2018},
publisher = {Springer},
isbn = {978-3-319-91271-4},
doi = {10.1007/978-3-319-91271-4_29},
url_pdf = {papers/baugh-abz-2018.pdf},
abstract = {
Control systems, protocols, and hardware design are among the most
common applications of state-based formal methods, and yet the types
of modeling and analysis they enable are also well-suited to problems
in scientific computation, where quality, reproducibility, and
productivity are growing concerns. We survey the challenges faced by
developers of scientific software, characterize the nature of the
programs they write, and offer some perspective on the role that
state-based methods can play in scientific domains.}
}

@article{baugh-scp-2018,
title = {Formal methods and finite element analysis of hurricane storm
 surge: A case study in software verification},
author = {John Baugh and Alper Altuntas},
journal = {Science of Computer Programming},
volume = {158},
pages = {100-121},
year = {2018},
OPTnote = {Abstract State Machines, Alloy, B, TLA, VDM and Z (ABZ 2016)},
issn = {0167-6423},
doi = {10.1016/j.scico.2017.08.012},
OPTurl = {http://www.sciencedirect.com/science/article/pii/S0167642317301715},
url_pdf = {papers/baugh-scp-2018.pdf},
keywords = {Formal methods, Model checking, Scientific computing,
 Earth and atmospheric sciences},
abstract = {
Used to predict the effects of hurricane storm surge, ocean
circulation models are essential tools for evacuation planning,
vulnerability assessment, and infrastructure design. Implemented as
numerical solvers that operate on large-scale datasets, these models
determine the geographic extent and severity of coastal floods and
other impacts. In this study, we look at an ocean circulation model
used in production and an extension made to it that offers substantial
performance gains. To explore implementation choices and ensure
soundness of the extension, we make use of Alloy, a declarative
modeling language with tool support and an automatic form of analysis
performed within a bounded scope using a SAT solver. Abstractions for
relevant parts of the ocean circulation model are presented, including
the physical representation of land and seafloor surfaces as a finite
element mesh, and an algorithm operating on it that allows for the
propagation of overland flows. The approach allows us to draw useful
conclusions about implementation choices and guarantees about the
extension, in particular that it is equivalence preserving.}
}

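This entry and baugh-abz-2016 below concern a discrete wet-dry algorithm that propagates overland flows across a finite element mesh. The sketch below is a toy stand-in for that kind of update: a dry node wets when its depth exceeds a threshold and a neighbor is already wet. The rule, threshold, and mesh are hypothetical simplifications of ADCIRC's actual (empirically tuned) criteria, which the papers model in Alloy rather than in Python.

# Toy synchronous wet-dry update on a tiny mesh; all rules and data are
# hypothetical.

H_MIN = 0.1   # hypothetical minimum depth for a node to be wettable (m)

def wet_dry_update(depth, wet, neighbors):
    """One synchronous update of the wet/dry flags over all mesh nodes."""
    new_wet = dict(wet)
    for node, d in depth.items():
        if not wet[node] and d > H_MIN and any(wet[n] for n in neighbors[node]):
            new_wet[node] = True      # overland flow reaches this node
        elif wet[node] and d <= H_MIN:
            new_wet[node] = False     # node dries out
    return new_wet

# Tiny 4-node mesh: a chain 0-1-2-3 with node 0 initially wet.
neighbors = {0: [1], 1: [0, 2], 2: [1, 3], 3: [2]}
depth = {0: 0.8, 1: 0.5, 2: 0.3, 3: 0.05}
wet = {0: True, 1: False, 2: False, 3: False}

for step in range(3):
    wet = wet_dry_update(depth, wet, neighbors)
    print(step, [n for n in sorted(wet) if wet[n]])
# The front advances one node per step and stops at node 3, which is too
# shallow to wet: [0, 1], then [0, 1, 2], then [0, 1, 2].
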
@inproceedings{altuntas-correctness-2017,
author = {Altuntas, Alper and Baugh, John},
title = {Verifying concurrency in an adaptive ocean circulation model},
booktitle = {Proceedings of the First International Workshop on
 Software Correctness for {HPC} Applications, {Correctness'17}},
year = {2017},
isbn = {978-1-4503-5127-0},
location = {Denver, CO, USA},
pages = {1-7},
doi = {10.1145/3145344.3145346},
OPTurl = {http://doi.acm.org/10.1145/3145344.3145346},
url_pdf = {papers/altuntas-correctness-2017.pdf},
acmid = {3145346},
publisher = {ACM},
OPTaddress = {New York, NY, USA},
keywords = {Scientific computing, concurrency, finite element
 analysis, hurricane storm surge, model checking},
abstract = {
We present a model checking approach for verifying the correctness of
concurrency in numerical models. The forms of concurrency we address
arise from (1) coupled modeling, where distinct components, e.g.,
ocean, wave, and atmospheric, exchange interface conditions during
runtime, and (2) multi-instance modeling, where local variations of
the same numerical model are executed concurrently to minimize common
(and therefore redundant) computations. We present general guidelines
for representing these forms of concurrency in an abstract
verification model and then apply them to an adaptive ocean
circulation model that determines the geographic extent and severity
of coastal floods. The ocean model employs multi-instance concurrency:
a collection of engineering design and failure scenarios are
concurrently simulated using patches, regions of a grid that grow and
shrink based on the hydrodynamic changes induced by each scenario. We
show how concurrency inherent in the simulation model can be
represented in a verification model to ensure correctness and to
automatically generate safe synchronization arrangements.}
}

@article{altuntas-ocemod-2017,
title = {Adaptive subdomain modeling: A multi-analysis technique for
 ocean circulation models},
author = {Alper Altuntas and John Baugh},
journal = {Ocean Modelling},
volume = {115},
pages = {86-104},
year = {2017},
issn = {1463-5003},
doi = {10.1016/j.ocemod.2017.05.009},
OPTurl = {http://www.sciencedirect.com/science/article/pii/S146350031730080X},
url_pdf = {papers/altuntas-ocemod-2017.pdf},
keywords = {Storm surge, Adaptive algorithm, Subdomain modeling,
 Moving boundaries, ADCIRC},
abstract = {
Many coastal and ocean processes of interest operate over large
temporal and geographical scales and require a substantial amount of
computational resources, particularly when engineering design and
failure scenarios are also considered. This study presents an adaptive
multi-analysis technique that improves the efficiency of these
computations when multiple alternatives are being simulated. The
technique, called adaptive subdomain modeling, concurrently analyzes
any number of child domains, with each instance corresponding to a
unique design or failure scenario, in addition to a full-scale parent
domain providing the boundary conditions for its children. To contain
the altered hydrodynamics originating from the modifications, the
spatial extent of each child domain is adaptively adjusted during
runtime depending on the response of the model. The technique is
incorporated in ADCIRC++, a re-implementation of the popular ADCIRC
ocean circulation model with an updated software architecture designed
to facilitate this adaptive behavior and to utilize concurrent
executions of multiple domains. The results of our case studies
confirm that the method substantially reduces computational effort
while maintaining accuracy.}
}

@article{baugh-istruc-2016,
title = {A general characterization of the {Hardy} {Cross} method as
 sequential and multiprocess algorithms},
author = {Baugh, John and Liu, Shu},
journal = {Structures},
volume = {6},
pages = {170-181},
year = {2016},
issn = {2352-0124},
doi = {10.1016/j.istruc.2016.03.004},
OPTurl = {https://www.sciencedirect.com/science/article/pii/S2352012416300054},
url_pdf = {papers/baugh-istruc-2016.pdf},
keywords = {Algorithms, Concurrency, Convergence, Hardy Cross method,
 Moment distribution},
abstract = {
The Hardy Cross method of moment distribution admits, for any problem,
an entire family of distribution sequences. Intuitively, the method
involves clamping the joints of beams and frames against rotation and
balancing moments iteratively, whether consecutively, simultaneously,
or in some combination of the two. We present common versions of the
moment distribution algorithm and generalizations of them as both
sequential and multiprocess algorithms, with the latter exhibiting the
full range of asynchronous behavior allowed by the method. We prove,
in the limit, that processes so defined converge to the same unique
solution regardless of the distribution sequence or interleaving of
steps. In defining the algorithms, we avoid overspecifying the order
of computation, initially using a sequential, nondeterministic process
and then, more generally, concurrent processes.}
}

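For readers unfamiliar with moment distribution, the sketch below shows one member of the family of schedules the entry above characterizes: a sequential sweep that releases a joint, distributes its unbalanced moment in proportion to the distribution factors, and carries half of each correction to the far end of the member. The beam, distribution factors, and fixed-end moments are hypothetical; the paper studies the general family of such schedules (sequential and concurrent), not this particular one.

# Sequential Hardy Cross sweep on a two-span beam A--B--C with exterior
# ends fixed and interior joint B free to rotate.  All data hypothetical.

M = {                       # fixed-end moments (clockwise positive)
    ("A", "AB"): -50.0, ("B", "AB"): +50.0,
    ("B", "BC"): -80.0, ("C", "BC"): +80.0,
}
DF = {("B", "AB"): 0.4, ("B", "BC"): 0.6}   # distribution factors at B
far_end = {("B", "AB"): ("A", "AB"), ("B", "BC"): ("C", "BC")}
free_joints = ["B"]

def release(joint):
    """Balance one joint and carry over; return its unbalanced moment."""
    ends = [e for e in M if e[0] == joint and e in DF]
    unbalanced = sum(M[e] for e in ends)
    for e in ends:
        correction = -DF[e] * unbalanced
        M[e] += correction
        M[far_end[e]] += 0.5 * correction   # carry-over factor 1/2
    return unbalanced

# One common distribution sequence: sweep the free joints until balanced.
# (The loop condition itself performs the releases; the body is empty.)
while max(abs(release(j)) for j in free_joints) > 1e-9:
    pass

for end, moment in sorted(M.items()):
    print(end, round(moment, 3))

With several free joints, the same sweep simply repeats until every joint's unbalanced moment is negligible; the paper's point is that the order of those releases, including fully concurrent ones, does not affect the limit.
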
@inproceedings{baugh-abz-2016,
title = {Modeling a discrete wet-dry algorithm for hurricane storm
 surge in {Alloy}},
author = {Baugh, John and Altuntas, Alper},
OPTeditor = {Butler, Michael and Schewe, Klaus-Dieter and Mashkoor, Atif
 and Biro, Miklos},
booktitle = {Abstract State Machines, Alloy, B, TLA, VDM, and Z: 5th
 International Conference, ABZ 2016},
note = {Lecture Notes in Computer Science 9675},
pages = {256-261},
address = {Cham},
year = {2016},
publisher = {Springer},
isbn = {978-3-319-33600-8},
doi = {10.1007/978-3-319-33600-8_18},
url_pdf = {papers/baugh-abz-2016.pdf},
abstract = {
We describe an Alloy model that helps check the correctness of a
discrete wet-dry algorithm used in a system for hurricane storm surge
prediction. Derived from simplified physics and encoded with empirical
rules, the algorithm operates on a finite element mesh to allow the
propagation of overland flows. Our study is motivated by complex
interactions between the algorithm and a recent performance
enhancement to the system that involves mesh partitioning. We briefly
outline our approach and describe safety properties of the extension,
as well as directions for future work.}
}

@article{dyer-ades-2016,
title = {{SMT}: An interface for localized storm surge modeling},
author = {Dyer, Tristan and Baugh, John},
journal = {Advances in Engineering Software},
volume = {92},
pages = {27-39},
year = {2016},
issn = {0965-9978},
doi = {10.1016/j.advengsoft.2015.10.003},
OPTurl = {http://www.sciencedirect.com/science/article/pii/S0965997815001428},
url_pdf = {papers/dyer-ades-2016.pdf},
keywords = {ADCIRC, Finite element analysis, Hurricane storm surge,
 Range search, Subdomain modeling, Visualization},
abstract = {
The devastation wrought by Hurricanes Katrina (2005), Ike (2008), and
Sandy (2012) in recent years continues to underscore the need for
better prediction and preparation in the face of storm surge and
rising sea levels. Simulations of coastal flooding using physically
based hydrodynamic codes like ADCIRC, while very accurate, are also
computationally expensive, making them impractical for iterative
design scenarios that seek to evaluate a range of countermeasures and
possible failure points. We present a graphical user interface that
supports local analysis of engineering design alternatives based on an
exact reanalysis technique called subdomain modeling, an approach that
substantially reduces the computational effort required. This
interface, called the Subdomain Modeling Tool (SMT), streamlines the
pre- and post-processing requirements of subdomain modeling by
allowing modelers to extract regions of interest interactively and by
organizing project data on the file system. Software design and
implementation issues that make the approach practical, such as a
novel range search algorithm, are presented. Descriptions of the
overall methodology, software architecture, and performance results
are given, along with a case study demonstrating its use.}
}

@article{baugh-ceng-2015,
title = {An exact reanalysis technique for storm surge and tides in a
 geographic region of interest},
author = {Baugh, John and Altuntas, Alper and Dyer, Tristan and Simon, Jason},
journal = {Coastal Engineering},
volume = {97},
pages = {60-77},
year = {2015},
issn = {0378-3839},
doi = {10.1016/j.coastaleng.2014.12.003},
OPTurl = {http://www.sciencedirect.com/science/article/pii/S0378383914002208},
url_pdf = {papers/baugh-ceng-2015.pdf},
keywords = {ADCIRC, Hurricane, Storm surge, Subdomain modeling},
abstract = {
Understanding the effects of storm surge in hurricane-prone regions is
necessary for protecting public and lifeline services and improving
resilience. While coastal ocean hydrodynamic models like ADCIRC may be
used to assess the extent of inundation, the computational cost may be
prohibitive since many local changes corresponding to design and
failure scenarios would ideally be considered. We present an exact
reanalysis technique and corresponding implementation that enable the
assessment of local subdomain changes with less computational effort
than would be required by a complete resimulation of the full
domain. So long as the subdomain is large enough to fully contain the
altered hydrodynamics, changes may be made and simulations performed
within it without the need to calculate new boundary values. Accurate
results are obtained even when subdomain boundary conditions are
forced only intermittently, and convergence is demonstrated by
progressively increasing the frequency at which they are
applied. Descriptions of the overall methodology, performance results,
and accuracy, as well as case studies, are presented.}
}