@inproceedings{gurriet_towards_2018, title = {Towards a {Framework} for {Realizable} {Safety} {Critical} {Control} through {Active} {Set} {Invariance}}, doi = {10.1109/ICCPS.2018.00018}, abstract = {This paper presents initial results towards a realizable framework for the safety critical controlled invariance of cyber-physical systems. The main contribution of this paper is the development of a control barrier function based methodology which can be used to enforce set invariance on systems in the presence of non-linear disturbances and uncertainty. The first part of this work is a review of the current methods available for finding viable sets and how they are linked to practical choices regarding safety. Their limitations and directions towards improvements when it comes to handling model uncertainty are also highlighted. The second part of this work is the formulation of a condition which can guarantee set invariance in the presence of generic uncertainty in the dynamics. An associated optimization problem to enforce that condition is proposed and a method to convexify the problem and make it solvable in real-time is formally presented. The effectiveness of the proposed framework is illustrated experimentally on a two-wheeled inverted pendulum.}, booktitle = {2018 {ACM}/{IEEE} 9th {International} {Conference} on {Cyber}-{Physical} {Systems} ({ICCPS})}, author = {Gurriet, Thomas and Singletary, Andrew and Reher, Jacob and Ciarletta, Laurent and Feron, Eric and Ames, Aaron}, month = apr, year = {2018}, keywords = {Barrier functions, Control systems, Cyber-physical systems, Kernel, Lyapunov methods, Non linear control, Optimization, Real time optimization, Safety, Set invariance, Uncertainty, active set invariance, control barrier function based methodology, control system synthesis, cyber-physical systems, model uncertainty, nonlinear control systems, nonlinear disturbances, optimisation, optimization problem, pendulums, realizable framework, realizable safety critical control, safety, safety critical controlled invariance, two-wheeled inverted pendulum, uncertain systems, wheels}, pages = {98--106}, }
@article{raposo_impressionistic_2017, title = {An Impressionistic Cartographic Solution for Base Map Land Cover with Coarse Pixel Data}, author = {Raposo, Paulo and Brewer, Cynthia A. and Sparks, Kevin}, year = {2017}, journal = {Cartographic Perspectives}, number = {83}, url = {http://cartographicperspectives.org/index.php/journal/article/view/1351/1486}, keywords = {enhancement, generalization, land cover, raster upsampling, topographic mapping, uncertainty}, abstract = {Several everyday cartography applications do not require sharply precise base maps, and in fact benefit from their generalization or deliberate obscuration, such as tourist or transit maps. Additionally, raster data fine enough for a given map scale are not always available. We present a method of creating an impressionistic land cover base map for topographic mapping in which the above two conditions are true, using the National Land Cover Database (NLCD) of the US Geological Survey (USGS). The method is based on reclassification, upsampling, constrained randomization at class boundary edges, and deliberate use of colors with very similar lightness values. The method spans both scientific geospatial data treatment and artistic cartographic design, and both generalizes and enhances the data. The processing, automated in ArcGIS\texttrademark, is detailed, and examples of the product are provided.} }
@article{li_sensitivity_2017, title = {Sensitivity analysis of the physical dynamics of the Fly River plume in Torres Strait}, author = {Li, Yanfang and Martins, Flavio and Wolanski, Eric}, year = {2017}, journal = {Estuarine, Coastal and Shelf Science}, volume = {194}, pages = {84--91}, url = {http://www.sciencedirect.com/science/article/pii/S0272771416307296}, keywords = {Altimetry, Modelling, River plume, Uncertainty, Wind}, abstract = {The intrusion in the Torres Strait of the Fly River plume polluted by mine tailings is an international water issue. To investigate the physical mechanisms involved in the intrusion process, numerical simulations were conducted to assess the relative influence of the bathymetry and the external forcing, namely the tides, the mean sea level slope across the strait, river runoff and wind forcing. The wind data from Horn Island, the only long-term wind station in the Torres Strait, are shown to be unreliable due to orographic effects. Mean sea level data from altimetry compare well with those from tide gauges in these shallow, reef-studded waters. The wind has a dominant effect on the mean sea level at seasonal and inter-annual periods. The resulting mean sea level difference across the strait fluctuated seasonally and strongly influenced the intrusion of the Fly River plume in the Torres Strait. The 3D finite-volume MOHID model reproduced the observation that the river plume starts by being stratified in the Gulf of Papua where it originates, and it mixes vertically when it enters the Torres Strait. The MOHID and the SLIM models were applied with different resolution to the Torres Strait and responded similarly to the external forcings. The predicted and observed Fly River plume intrusion in the eastern Torres Strait agreed well with each other, including the formation of patches due to flow reversals. However, the two models predicted a widely different Fly River plume in its far field in the western Torres Strait; the differences were attributed to the different bathymetry in the Australian and British-US bathymetry data for these poorly charted waters, which demonstrates the importance of the details of the bathymetry in controlling the extent of plume intrusion.} }
@article{doblas-mirandaReviewCombinationGlobal2017, title = {A Review of the Combination among Global Change Factors in Forests, Shrublands and Pastures of the {{Mediterranean Region}}: Beyond Drought Effects}, author = {{Doblas-Miranda}, E. and Alonso, R. and Arnan, X. and Bermejo, V. and Brotons, L. and {de las Heras}, J. and Estiarte, M. and H{\'o}dar, J. A. and Llorens, P. and Lloret, F. and {L{\'o}pez-Serrano}, F. R. and {Mart{\'i}nez-Vilalta}, J. and Moya, D. and Pe{\~n}uelas, J. and Pino, J. and Rodrigo, A. and {Roura-Pascual}, N. and Valladares, F. and Vil{\`a}, M. and Zamora, R. and Retana, J.}, year = {2017}, month = jan, volume = {148}, pages = {42--54}, issn = {0921-8181}, doi = {10.1016/j.gloplacha.2016.11.012}, abstract = {[Highlights] [::] Different global change factors combine causing unprecedented ecological effects. [::] Much more complex interactions arise when combinations occur together. [::] Drought should be considered when designing and applying management policies. [::] Conserving Mediterranean terrestrial ecosystems is a collective effort. [Abstract] Climate change, alteration of atmospheric composition, land abandonment in some areas and land use intensification in others, wildfires and biological invasions threaten forests, shrublands and pastures all over the world. However, the impacts of the combinations between global change factors are not well understood despite their pressing importance. Here we posit that reviewing global change factor combinations in an exemplary region can highlight the necessary aspects in order to better understand the challenges we face, warning about the consequences, and showing the challenges ahead of us. The forests, shrublands and pastures of the Mediterranean Basin are an ideal scenario for the study of these combinations due to its spatial and temporal heterogeneity, increasing and diverse human population and the historical legacy of land use transformations. The combination of multiple global change factors in the Basin shows different ecological effects. Some interactions alter the effects of a single factor, as drought enhances or decreases the effects of atmospheric components on plant ecophysiology. Several interactions generate new impacts: drought and land use changes, among others, alter water resources and lead to land degradation, vegetation regeneration decline, and expansion of forest diseases. Finally, different factors can occur alone or simultaneously leading to further increases in the risk of fires and biological invasions. The transitional nature of the Basin between temperate and arid climates involves a risk of irreversible ecosystem change towards more arid states. However, combinations between factors lead to unpredictable ecosystem alteration that goes beyond the particular consequences of drought. Complex global change scenarios should be studied in the Mediterranean and other regions of the world, including interregional studies.
Here we show the inherent uncertainty of this complexity, which should be included in any management strategy.}, journal = {Global and Planetary Change}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-14277883,~to-add-doi-URL,agricultural-abandonment,arid-region,climate-change,complexity,degradation,droughts,ecosystem,feedback,forest-fires,forest-management,forest-pests,forest-resources,global-change,integrated-natural-resources-modelling-and-management,land-use,mediterranean-region,non-linearity,pastures,review,shrubs,species-decline,uncertainty,vegetation,water-resources,wildfires}, lccn = {INRMM-MiD:c-14277883} }
@article{anderson_continuum_2017, title = {Continuum {Reconfigurable} {Parallel} {Robots} for {Surgery}: {Shape} {Sensing} and {State} {Estimation} {With} {Uncertainty}}, volume = {2}, issn = {2377-3766}, shorttitle = {Continuum {Reconfigurable} {Parallel} {Robots} for {Surgery}}, doi = {10.1109/LRA.2017.2678606}, abstract = {This letter examines shape sensing for a new class of surgical robot that consists of parallel flexible structures that can be reconfigured inside the human body. Known as continuum reconfigurable incisionless surgical parallel (CRISP) robots, these devices provide access to the human body through needle-sized entry points, yet can be configured into trusslike structures capable of dexterous movement and large force application. They can also be reconfigured as needed during a surgical procedure. Since CRISP robots are elastic, they will deform when subjected to external forces or other perturbations. In this letter, we explore how to combine sensor information with mechanics-based models for CRISP robots to estimate their shapes under applied loads. The end result is a shape sensing framework for CRISP robots that will enable future research on control under applied loads, autonomous motion, force sensing, and other robot behaviors.}, number = {3}, journal = {IEEE Robotics and Automation Letters}, author = {Anderson, P. L. and Mahoney, A. W. and Webster, R. J.}, month = jul, year = {2017}, keywords = {CRISP robots, Flexible robots, Needles, Parallel robots, Robot sensing systems, Shape, Uncertainty, autonomous motion, continuum reconfigurable incisionless surgical parallel robots, continuum reconfigurable parallel robots, dexterous manipulators, dexterous movement, flexible manipulators, flexible robots, force sensing, mechanics-based models, medical robotics, needles, parallel flexible structures, shape sensing, state estimation, surgery, surgical robot, surgical robotics: laparoscopy, surgical robotics: steerable catheters/needles, truss-like structures}, pages = {1617--1624} }
@techreport{arxiv:1607.04174, author = {Andrews, Shawn and Hamarneh, Ghassan}, title = {Adaptable Precomputation for Random Walker Image Segmentation and Registration}, number = {arxiv:1607.04174}, pages = {1--9}, month = jul, year = {2016}, keywords = {Segmentation, Registration and Matching, Optimization, Uncertainty}, pdf = {http://www.cs.sfu.ca/~hamarneh/ecopy/arxiv_1607_04174.pdf} }
@article{genske_rethinking_2016, title = {Rethinking risk assessment for emerging technology first-in-human trials}, volume = {19}, issn = {1386-7423}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84939218906&doi=10.1007%2fs11019-015-9660-7&partnerID=40&md5=0c69b756c4bfbb6e11e76d2fd079ed7f}, doi = {10.1007/s11019-015-9660-7}, abstract = {Recent progress in synthetic biology (SynBio) has enabled the development of novel therapeutic opportunities for the treatment of human disease. In the near future, first-in-human trials (FIH) will be indicated. FIH trials mark a key milestone in the translation of medical SynBio applications into clinical practice. Fostered by uncertainty of possible adverse events for trial participants, a variety of ethical concerns emerge with regards to SynBio FIH trials, including ‘risk’ minimization. These concerns are associated with any FIH trial, however, due to the novelty of the approach, they become more pronounced for medical applications of emerging technologies (emTech) like SynBio. To minimize potential harm for trial participants, scholars, guidelines, regulations and policy makers alike suggest using ‘risk assessment’ as evaluation tool for such trials. Conversely, in the context of emTech FIH trials, we believe it to be at least questionable to contextualize uncertainty of potential adverse events as ‘risk’ and apply traditional risk assessment methods. Hence, this issue needs to be discussed to enable alterations of the evaluation process before the translational phase of SynBio applications begins. In this paper, we will take the opportunity to start the debate and highlight how a misunderstanding of the concept of risk, and the possibilities and limitations of risk assessment, respectively, might impair decision-making by the relevant regulatory authorities and research ethics committees, and discuss possible solutions to tackle the issue.}, language = {eng}, number = {1}, journal = {Medicine, Health Care and Philosophy}, author = {Genske, Anna and Engel-Glatter, Sabrina}, year = {2016}, keywords = {10 Ignorance, uncertainty and risk, Clinical Trials as Topic, Humans, Ignorance, incertitude et risque, PRINTED (Fonds papier), Uncertainty, clinical trial (topic), ethics, human, organization and management, risk assessment, synthetic biology}, pages = {125--139}, }
@article{naudtsEuropeForestManagement2016, title = {Europe's Forest Management Did Not Mitigate Climate Warming}, author = {Naudts, Kim and Chen, Yiying and McGrath, Matthew J. and Ryder, James and Valade, Aude and Otto, Juliane and Luyssaert, Sebastiaan}, year = {2016}, volume = {351}, pages = {597--600}, issn = {1095-9203}, doi = {10.1126/science.aad7270}, abstract = {[Europe's managed forests contribute to warming] For most of the past 250 years, surprisingly it seems that Europe's managed forests have been a net source of carbon, contributing to climate warming rather than mitigating it. Naudts et al. reconstructed the history of forest management in Europe in the context of a land-atmosphere model. The release of carbon otherwise stored in litter, dead wood, and soil carbon pools in managed forests was one key factor contributing to climate warming. Second, the conversion of broadleaved forests to coniferous forests has changed the albedo and evapotranspiration of those forests, also leading to warming. Thus, climate change mitigation policies in Europe and elsewhere may need to consider changes in forest management. [Abstract] Afforestation and forest management are considered to be key instruments in mitigating climate change. Here we show that since 1750, in spite of considerable afforestation, wood extraction has led to Europe's forests accumulating a carbon debt of 3.1 petagrams of carbon. We found that afforestation is responsible for an increase of 0.12 watts per square meter in the radiative imbalance at the top of the atmosphere, whereas an increase of 0.12 kelvin in summertime atmospheric boundary layer temperature was mainly caused by species conversion. Thus, two and a half centuries of forest management in Europe have not cooled the climate. The political imperative to mitigate climate change through afforestation and forest management therefore risks failure, unless it is recognized that not all forestry contributes to climate change mitigation.}, journal = {Science}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13925692,~to-add-doi-URL,albedo,anthropic-feedback,bias-toward-primacy-of-theory-over-reality,climate-change,complexity,forest-management,forest-resources,global-warming,historical-perspective,incomplete-knowledge,local-over-complication,modelling-uncertainty,science-based-decision-making,science-policy-interface,uncertainty,unexpected-effect,unknown}, lccn = {INRMM-MiD:c-13925692}, number = {6273} }
@inproceedings{Conway:2016:MTB:2851581.2892433, author = {Conway, Dan and Chen, Fang and Yu, Kun and Zhou, Jianlong and Morris, Richard}, title = {Misplaced Trust: A Bias in Human-Machine Trust Attribution -- In Contradiction to Learning Theory}, booktitle = {Proceedings of the 2016 CHI Conference Extended Abstracts on Human Factors in Computing Systems}, series = {CHI EA '16}, year = {2016}, location = {San Jose, California, USA}, pages = {3035--3041}, numpages = {7}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/2851581.2892433}, isbn = {978-1-4503-4082-3}, acmid = {2892433}, url = {http://doi.acm.org/10.1145/2851581.2892433}, keywords = {HCI, decision making, learning, trust, uncertainty}}
@article{kimSoilErosionAssessment2016, title = {Soil Erosion Assessment - {{Mind}} the Gap}, author = {Kim, Jongho and Ivanov, Valeriy Y. and Fatichi, Simone}, year = {2016}, month = dec, volume = {43}, pages = {2016GL071480+}, issn = {0094-8276}, doi = {10.1002/2016gl071480}, abstract = {Accurate assessment of erosion rates remains an elusive problem because soil loss is strongly nonunique with respect to the main drivers. In addressing the mechanistic causes of erosion responses, we discriminate between macroscale effects of external factors -- long studied and referred to as ``geomorphic external variability'' -- and microscale effects, introduced as ``geomorphic internal variability.'' The latter source of erosion variations represents the knowledge gap, an overlooked but vital element of geomorphic response, significantly impacting the low predictability skill of deterministic models at field-catchment scales. This is corroborated with experiments using a comprehensive physical model that dynamically updates the soil mass and particle composition. As complete knowledge of microscale conditions for arbitrary location and time is infeasible, we propose that new predictive frameworks of soil erosion should embed stochastic components in deterministic assessments of external and internal types of geomorphic variability.}, journal = {Geophysical Research Letters}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-14257841,~to-add-doi-URL,erodibility,local-scale,modelling-uncertainty,soil-erosion,soil-resources,uncertainty}, lccn = {INRMM-MiD:c-14257841}, number = {24} }
@article{andersonTroubleNegativeEmissions2016, title = {The Trouble with Negative Emissions}, author = {Anderson, Kevin and Peters, Glen}, year = {2016}, month = oct, volume = {354}, pages = {182--183}, issn = {1095-9203}, doi = {10.1126/science.aah4567}, abstract = {In December 2015, member states of the United Nations Framework Convention on Climate Change (UNFCCC) adopted the Paris Agreement, which aims to hold the increase in the global average temperature to below 2\textdegree C and to pursue efforts to limit the temperature increase to 1.5\textdegree C. The Paris Agreement requires that anthropogenic greenhouse gas emission sources and sinks are balanced by the second half of this century. Because some nonzero sources are unavoidable, this leads to the abstract concept of ``negative emissions,'' the removal of carbon dioxide (CO2) from the atmosphere through technical means. The Integrated Assessment Models (IAMs) informing policy-makers assume the large-scale use of negative-emission technologies. If we rely on these and they are not deployed or are unsuccessful at removing CO2 from the atmosphere at the levels assumed, society will be locked into a high-temperature pathway. [Excerpt] [...] The promise of future and cost-optimal negative-emission technologies is more politically appealing than the prospect of developing policies to deliver rapid and deep mitigation now. If negative-emission technologies do indeed follow the idealized, rapid, and successful deployment assumed in the models, then any reduction in near-term mitigation caused by the appeal of negative emissions will likely lead to only a small and temporary overshoot of the Paris temperature goals. In stark contrast, if the many reservations increasingly voiced about negative-emission technologies [...] turn out to be valid, the weakening of near-term mitigation and the failure of future negative-emission technologies will be a prelude to rapid temperature rises reminiscent of the 4\textdegree C ``business as usual'' pathway feared before the Paris Agreement. [] Negative-emission technologies are not an insurance policy, but rather an unjust and high-stakes gamble. There is a real risk they will be unable to deliver on the scale of their promise. If the emphasis on equity and risk aversion embodied in the Paris Agreement are to have traction, negative-emission technologies should not form the basis of the mitigation agenda. [...] They could very reasonably be the subject of research, development, and potentially deployment, but the mitigation agenda should proceed on the premise that they will not work at scale. The implications of failing to do otherwise are a moral hazard par excellence.}, journal = {Science}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-14161174,~to-add-doi-URL,biodiversity,bioenergy,carbon-capture-and-storage,carbon-dioxide-removal,carbon-emissions,climate-change,environment-society-economy,ghg,global-warming,integrated-modelling,negative-emissions,policy-strategies-for-scientific-uncertainty,science-ethics,science-policy-interface,science-society-interface,sustainability,technology,terminology,trade-offs,uncertainty,unknown}, lccn = {INRMM-MiD:c-14161174}, number = {6309} }
@article{barhamInternationalPlantSentinel2016, title = {The {{International Plant Sentinel Network}}: A Tool for Regional and National Plant Protection Organizations}, author = {Barham, E. and Sharrock, S. and Lane, C. and Baker, R.}, year = {2016}, month = apr, volume = {46}, pages = {156--162}, issn = {0250-8052}, doi = {10.1111/epp.12283}, abstract = {[Excerpt:Introduction] A 2011 global survey of botanic gardens and arboreta, which included 204 respondents from 146 institutes, revealed that the botanic garden community has the potential to play a significant role in safeguarding plant health. However, responding institutes cited a lack of available training, resources and coordination to support any such work (Kramer \& Hird 2011). Since its launch in November 2013, the International Plant Sentinel Network (IPSN) has been working to provide this support and illustrate the usefulness of such a global network to those working within plant health. The IPSN's ultimate aim is to provide an early warning system for new and emerging pest and pathogen risks. [] The IPSN is a developing network of botanic gardens, arboreta, National Plant Protection Organizations (NPPOs) and plant health scientists. Funded through EUPHRESCO (EUropean PHytosanitary RESearch COordination) Phytosanitary ERA-Net, the project is now coming to the end of its initial 3 years. During this time, the IPSN has worked to promote engagement, increase awareness and provide the resources to support gardens in carrying out plant health research. The next phase of the IPSN will be to coordinate and facilitate sentinel research projects around the world, working in close collaboration with gardens, NPPOs and scientists. In this way, valuable information will be collected in botanic gardens to aid plant health, for example for pest risk analysis (PRA) and other methods of assessing risk. [] [...] [IPSN participation and coordination] In the UK the EUPHRESCO project has been funded by the UK's Department for the Environment, Food and Rural Affairs (DEFRA) and led by FERA, with CABI-UK and Forest Research (UK). Other project partners are the Julius K\"uhn-Institut (Germany), the Plant Protection Services (Netherlands) and the Department for Innovation in Biological, Agro-food and Forest Systems, University of Tuscia (Italy). The network is, and will continue to be, coordinated by Botanic Gardens Conservation International (BGCI). [] BGCI is an international consortium of botanic gardens and arboreta dedicated to conserving global plant diversity. The organization, which was established in 1987, sits at the heart of over 3000 gardens worldwide and has much experience coordinating and supporting the work of these institutes. [...] [] [...] [Conclusions] Sentinel research can provide scientists, RPPOs and NPPOs with important information required for making statutory decisions and developing management programmes for damaging plant pests and diseases. Botanic gardens and arboreta provide unique resources in which to carry out this research, and have in the past demonstrated on a small scale the potential role they can play. However, providing support and coordination on a global level can help to realize their true worth to plant health. In the last 3 years the IPSN has developed many of the tools required to engage and support the botanic garden community in this area. The project has raised awareness and understanding around the world, and developed a network of interested and willing individual and institutional participants. 
The next step of the IPSN will be to facilitate research and provide meaningful and valuable data to NPPOs and RPPOs across the world. To do this, the IPSN needs engagement and support from such government organizations, both financial and in kind. The ultimate goal will be to create a sustainable network, coordinated by BGCI, supported by NPPOs, RPPOs and plant health scientists, but led by the gardens themselves.}, journal = {EPPO Bulletin}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-14128453,field-measurements,forest-pests,forest-resources,global-scale,monitoring,plant-pests,risk-assessment,uncertainty,unknown,vegetation}, lccn = {INRMM-MiD:c-14128453}, number = {1} }
@inproceedings{moshi_optimization_2016, title = {Optimization of integrated design and operation of microgrids under uncertainty}, doi = {10.1109/PSCC.2016.7540870}, abstract = {We present two Mixed-Integer Linear Programming (MILP) models for a complete microgrid planning problem which consider uncertainties in the main input data (hourly solar irradiance, wind speed and electricity demand). The first model adopts a Two-Stage Stochastic Integer Programming (2SSIP) formulation with discrete scenarios, whereas the second model adopts a Robust Optimization (RO) formulation with polyhedral uncertainty sets. The aim is to determine the optimal combination, capacities, and number of components to install in the microgrid considering long-term operations and uncertainty in the main input data. The 2SSIP model offers the possibility to obtain a planning solution using discrete scenarios sampled from appropriate probability distributions. The RO model gives a planning solution which is guaranteed to be feasible for any realization of input data within specified uncertainty sets. To show and compare the effectiveness of these models, we present a case study in which we apply the two models to plan a standalone microgrid in Singida, Tanzania. The proposed models can be applied for planning and detailed feasibility studies on generic microgrids with renewables, storage batteries and diesel generators.}, booktitle = {2016 {Power} {Systems} {Computation} {Conference} ({PSCC})}, author = {Moshi, G. G. and Bovo, C. and Berizzi, A. and Taccari, L.}, month = jun, year = {2016}, keywords = {2SSIP formulation, Batteries, Biological system modeling, Linear programming, MILP models, Microgrids, Optimization, Planning, RO formulation, Uncertainty, distributed power generation, integer programming, linear programming, microgrid design optimization, microgrid operation, microgrid planning, microgrid planning problem, mixed-integer linear programming, optimization, polyhedral uncertainty sets, power distribution planning, probability distributions, robust optimization, robust optimization formulation, statistical distributions, stochastic integer programming, two-stage stochastic integer programming, uncertainty}, pages = {1--7} }
@article{allisonReproducibilityTragedyErrors2016, title = {Reproducibility: A Tragedy of Errors}, author = {Allison, David B. and Brown, Andrew W. and George, Brandon J. and Kaiser, Kathryn A.}, year = {2016}, month = feb, volume = {530}, pages = {27--29}, issn = {0028-0836}, doi = {10.1038/530027a}, abstract = {Mistakes in peer-reviewed papers are easy to find but hard to fix, report David B. Allison and colleagues. [Excerpt: Three common errors] As the influential twentieth-century statistician Ronald Fisher (pictured) said: ``To consult the statistician after an experiment is finished is often merely to ask him to conduct a post mortem examination. He can perhaps say what the experiment died of.'' [] [...] Frequent errors, once recognized, can be kept out of the literature with targeted education and policies. Three of the most common are outlined below. These and others are described in depth in an upcoming publication [7]. [::1. Mistaken design or analysis of cluster-randomized trials] In these studies, all participants in a cluster (for example, a cage, school or hospital) are given the same treatment. The number of clusters (not just the number of individuals) must be incorporated into the analysis. Otherwise, results often seem, falsely, to be statistically significant [8, 9]. Increasing the number of individuals within clusters can increase power, but the gains are minute compared with increasing clusters. Designs with only one cluster per treatment are not valid as randomized experiments, regardless of how many individuals are included. [::2. Miscalculation in meta-analyses] Effect sizes are often miscalculated when meta-analysts are confronted with incomplete information and do not adapt appropriately. Another problem is confusion about how to calculate the variance of effects. Different study designs and meta-analyses require different approaches. Incorrect or inconsistent choices can change effect sizes, study weighting or the overall conclusions [4]. [::3. Inappropriate baseline comparisons] In at least six articles, authors tested for changes from the baseline in separate groups; if one was significant and one not, the authors (wrongly) proposed a difference between groups. Rather than comparing `differences in nominal significance' (the DINS error), differences between groups must be compared directly. For studies comparing two equal-sized groups, the DINS error can inflate the false-positive rate from 5\,\% to as much as 50\,\% (ref. 10). [] [...]}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13924997,~to-add-doi-URL,bias-correction,cognitive-biases,data-collection-bias,peer-review,post-publication-peer-review,publication-errors,reproducible-research,research-management,science-ethics,statistics,uncertainty,uncertainty-propagation}, lccn = {INRMM-MiD:c-13924997}, number = {7588} }
@article{gatt2016multilingual, Author = {Gatt, A and Portet, F}, Journal = {Fuzzy Sets and Systems}, Pages = {73--93}, Title = {Multilingual generation of uncertain temporal expressions from data: A study of a possibilistic formalism and its consistency with human subjective evaluations}, Url = {./pubs/fss2015_uncertainty.pdf}, Volume = {285}, Year = {2016}, Keywords = {natural language generation, data-to-text, uncertainty, fuzzy sets}}
@Article{Skarlatidis_2015_17615, author = {Skarlatidis, Anastasios and Paliouras, Georgios and Artikis, Alexander and Vouros, George A.}, address = {New York, NY, USA}, journal = {ACM Trans. Comput. Logic}, month = {feb}, number = {2}, pages = {11:1--11:37}, publisher = {ACM}, title = {Probabilistic event calculus for event recognition}, volume = {16}, year = {2015}, issn = {1529-3785}, keywords = {Events, machine learning, probabilistic inference, uncertainty}, url = {http://doi.acm.org/10.1145/2699916}, doi = {10.1145/2699916} }
@article{bond_managing_2015, title = {Managing uncertainty, ambiguity and ignorance in impact assessment by embedding evolutionary resilience, participatory modelling and adaptive management}, volume = {151}, issn = {0301-4797}, url = {http://www.sciencedirect.com/science/article/pii/S0301479714006094}, doi = {10.1016/j.jenvman.2014.12.030}, abstract = {In the context of continuing uncertainty, ambiguity and ignorance in impact assessment (IA) prediction, the case is made that existing IA processes are based on false ‘normal’ assumptions that science can solve problems and transfer knowledge into policy. Instead, a ‘post-normal science’ approach is needed that acknowledges the limits of current levels of scientific understanding. We argue that this can be achieved through embedding evolutionary resilience into IA; using participatory workshops; and emphasising adaptive management. The goal is an IA process capable of informing policy choices in the face of uncertain influences acting on socio-ecological systems. We propose a specific set of process steps to operationalise this post-normal science approach which draws on work undertaken by the Resilience Alliance. This process differs significantly from current models of IA, as it has a far greater focus on avoidance of, or adaptation to (through incorporating adaptive management subsequent to decisions), unwanted future scenarios rather than a focus on the identification of the implications of a single preferred vision. Implementing such a process would represent a culture change in IA practice as a lack of knowledge is assumed and explicit, and forms the basis of future planning activity, rather than being ignored.}, urldate = {2015-01-02}, journal = {Journal of Environmental Management}, author = {Bond, Alan and Morrison-Saunders, Angus and Gunn, Jill A. E. and Pope, Jenny and Retief, Francois}, month = mar, year = {2015}, keywords = {Adaptive management, Ambiguity, Evolutionary resilience, Ignorance, Post-normal science, uncertainty}, pages = {97--104} }
@article{hamilton_habitat_2015, title = {Habitat suitability modelling of rare species using Bayesian networks: Model evaluation under limited data}, author = {Hamilton, Serena H. and Pollino, Carmel A. and Jakeman, Anthony J.}, year = {2015}, month = mar, journal = {Ecological Modelling}, volume = {299}, pages = {64--78}, doi = {10.1016/j.ecolmodel.2014.12.004}, url = {http://www.sciencedirect.com/science/article/pii/S0304380014006103}, urldate = {2015-01-06}, keywords = {Bayesian belief networks, Ecological modeling, Endangered species, Model validation, Uncertainty}, abstract = {Paucity of data on rare species is a common problem, preventing the use of most approaches to model development and evaluation. This study demonstrates how models can be developed and different forms of evaluation can be performed despite a lack of sufficient data, by presenting a habitat suitability model for the rare Astacopsis gouldi, the giant freshwater crayfish. We use a Bayesian network approach that readily incorporates incomplete data and allows for the evaluation of uncertainties. To supplement the limited field data on A. gouldi, expert knowledge was elicited through surveys designed to provide probability values that described the strength of relationships between the habitat suitability of the species and three variables – elevation, upstream riparian condition and geomorphic condition – and credible intervals around those values. A series of 18 alternative models were developed based on the same model structure but parameterised using different sources – expert judgement, field data or a combination of the two. The models were evaluated by estimating and comparing their performance accuracy and sensitivity analysis results, and in assessing the assumptions underpinning each of the models. Using performance accuracy as a measure, the data-based and combined expert- and data-based models performed better than the expert-based models. The sensitivity analysis results show that geomorphic condition was the most influential variable in the majority of models and that elevation had minimal influence on the occurrence of A. gouldi. Overall the models were found to have large predictive uncertainties, although the modelling process itself revealed insights into the habitat suitability of the species and identified key knowledge and data gaps for future monitoring, management and research.} }
@article{reichert_conceptual_2015, title = {The conceptual foundation of environmental decision support}, volume = {154}, issn = {0301-4797}, url = {http://www.sciencedirect.com/science/article/pii/S0301479715001140}, doi = {10.1016/j.jenvman.2015.01.053}, abstract = {Environmental decision support intends to use the best available scientific knowledge to help decision makers find and evaluate management alternatives. The goal of this process is to achieve the best fulfillment of societal objectives. This requires a careful analysis of (i) how scientific knowledge can be represented and quantified, (ii) how societal preferences can be described and elicited, and (iii) how these concepts can best be used to support communication with authorities, politicians, and the public in environmental management. The goal of this paper is to discuss key requirements for a conceptual framework to address these issues and to suggest how these can best be met. We argue that a combination of probability theory and scenario planning with multi-attribute utility theory fulfills these requirements, and discuss adaptations and extensions of these theories to improve their application for supporting environmental decision making. With respect to (i) we suggest the use of intersubjective probabilities, if required extended to imprecise probabilities, to describe the current state of scientific knowledge. To address (ii), we emphasize the importance of value functions, in addition to utilities, to support decisions under risk. We discuss the need for testing “non-standard” value aggregation techniques, the usefulness of flexibility of value functions regarding attribute data availability, the elicitation of value functions for sub-objectives from experts, and the consideration of uncertainty in value and utility elicitation. With respect to (iii), we outline a well-structured procedure for transparent environmental decision support that is based on a clear separation of scientific prediction and societal valuation. We illustrate aspects of the suggested methodology by its application to river management in general and with a small, didactical case study on spatial river rehabilitation prioritization.}, urldate = {2015-03-09}, journal = {Journal of Environmental Management}, author = {Reichert, Peter and Langhans, Simone D. and Lienert, Judit and Schuwirth, Nele}, month = may, year = {2015}, keywords = {Environmental management, Intersubjective probabilities, Multi-attribute utility theory, Multi-attribute value theory, Multi-criteria decision analysis, River management, Societal decision support, Stakeholder involvement, uncertainty}, pages = {316--332} }
@article{nicolaisen_roads_2015, title = {Roads to nowhere: {The} accuracy of travel demand forecasts for do-nothing alternatives}, volume = {37}, issn = {0967-070X}, shorttitle = {Roads to nowhere}, url = {http://www.sciencedirect.com/science/article/pii/S0967070X14002054}, doi = {10.1016/j.tranpol.2014.10.006}, abstract = {Impact appraisals of major transport infrastructure projects rely extensively on the accuracy of forecasts for the expected construction costs and aggregate travel time savings. The latter of these further depend on the accuracy of forecasts for the expected travel demand in both the do-something and do-nothing alternatives, in order to assess the impact of implementing new projects compared to doing nothing or postponing the decision. Previous research on the accuracy of travel demand forecasts has focused exclusively on the do-something alternatives, where inaccuracies have been revealed in the form of large imprecision as well as systematic biases. However, little or no attention has been given to the accuracy of demand forecasts for the do-nothing alternatives, which are equally important for impact appraisals. This paper presents the first ex-post evaluation of demand forecast accuracy for do-nothing alternatives, based on an empirical study of 35 road projects in Denmark and England. The results show a tendency for systematic overestimation of travel demand in the do-nothing alternatives, which is in contrast to the systematic underestimation of travel demand observed in previous studies of do-something alternatives. The main implication for planning practice is that the severity of future congestion problems is systematically overestimated. As a consequence, impact appraisals of road construction as a means of congestion relief appear overly beneficial.}, urldate = {2014-12-04}, journal = {Transport Policy}, author = {Nicolaisen, Morten Skou and Næss, Petter}, month = jan, year = {2015}, keywords = {Ex-post evaluation, Forecasts, Impact appraisal, Inaccuracy, Transport planning, uncertainty}, pages = {57--63} }
@article{7089294, author = {Guo, H and Huang, J and Laidlaw, D H}, doi = {10.1109/TVCG.2015.2424872}, issn = {1077-2626}, journal = {IEEE Transactions on Visualization and Computer Graphics}, keywords = {Accuracy,Data visualization,Encoding,Image color analysis,Interference,Uncertainty,Visual variable,Visualization,color-based visual variable,color-based visual variables,computational geometry,data attributes,data visualisation,data visualization,dimensional integrality,discriminability level,focus-based visual variable,focus-based visual variables,fuzziness attribute,geometry-based visual variable,grain attribute,graph edge attributes,graph theory,graph uncertainty visualization,graph visualization,graph-task type,hue attribute,interference degree,intrinsic factor,lightness attribute,line graphical primitives,paired-visual variable evaluation,perception,saturation attribute,transparency attribute,uncertainty encoding,uncertainty representation,uncertainty visualization,user perception,visual variable,width attribute}, number = {10}, pages = {1173--1186}, title = {{Representing Uncertainty in Graph Edges: An Evaluation of Paired Visual Variables}}, volume = {21}, year = {2015} }
@article{liu_developing_2014, title = {Developing {GIOVANNI}-based online prototypes to intercompare {TRMM}-related global gridded-precipitation products}, volume = {66}, issn = {0098-3004}, url = {http://www.sciencedirect.com/science/article/pii/S009830041300318X}, doi = {10.1016/j.cageo.2013.12.012}, abstract = {New online prototypes have been developed to extend and enhance the previous effort by facilitating investigation of product characteristics and intercomparison of precipitation products in different algorithms as well as in different versions at different spatial scales ranging from local to global without downloading data and software. Several popular Tropical Rainfall Measuring Mission (TRMM) products and the TRMM Composite Climatology are included. In addition, users can download customized data in several popular formats for further analysis. Examples show product quality problems and differences in several monthly precipitation products. It is seen that differences in daily and monthly precipitation products are distributed unevenly in space and it is necessary to have tools such as those presented here for customized and detailed investigations. A simple time series and two area maps allow the discovery of abnormal values of 3A25 in one of the months. An example shows a V-shaped valley issue in the Version 6 3B43 time series and another example shows a sudden drop in 3A25 monthly rain rate, all of which provide important information when the products are used for long-term trend studies. Future plans include adding more products and statistical functionality in the prototypes.}, urldate = {2019-01-23}, journal = {Computers \& Geosciences}, author = {Liu, Zhong and Ostrenga, Dana and Teng, William and Kempler, Steven and Milich, Lenard}, month = may, year = {2014}, keywords = {Intercomparison, Online tools, Precipitation, Satellite remote sensing, TRMM, Uncertainty}, pages = {168--181} }
@article{natureTrickLight2014, title = {Trick of the Light}, author = {{Nature}}, year = {2014}, month = feb, volume = {506}, pages = {6}, issn = {0028-0836}, doi = {10.1038/506006b}, abstract = {The Amazon doesn't absorb extra carbon in the dry season after all. It can become a carbon source.}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12972834,amazonia,carbon-cycle,forest-resources,modelling,modelling-uncertainty,organic-carbon,precipitation,remote-sensing,solar-radiation,uncertainty}, lccn = {INRMM-MiD:c-12972834}, number = {7486} }
@article{citeulike:13133263, title = {Modelling soil erosion at {E}uropean scale: towards harmonization and reproducibility}, author = {Bosco, Claudio and de Rigo, Daniele and Dewitte, Olivier and Poesen, Jean and Panagos, Panos}, year = {2014}, month = apr, day = {11}, volume = {2}, number = {4}, pages = {2639--2680}, issn = {2195-9269}, doi = {10.5194/nhessd-2-2639-2014}, url = {http://dx.doi.org/10.5194/nhessd-2-2639-2014}, journal = {Natural Hazards and Earth System Sciences Discussions}, keywords = {bias-toward-primacy-of-theory-over-reality, computational-science, continental-scale, data-integration, empirical-equation, ensemble, environmental-modelling, erosivity, europe, geospatial-semantic-array-programming, gis, integrated-modelling, knowledge-integration, mastrave-modelling-library, modelling, relative-distance-similarity, reproducibility, reproducible-research, rusle, semantic-array-programming, soil-erosion, soil-resources, stoniness, uncertainty, visual-assessment}, abstract = {Soil erosion by water is one of the most widespread forms of soil degradation. The loss of soil as a result of erosion can lead to decline in organic matter and nutrient contents, breakdown of soil structure and reduction of the water holding capacity. Measuring soil loss across the whole landscape is impractical and thus research is needed to improve methods of estimating soil erosion with computational modelling, upon which integrated assessment and mitigation strategies may be based. Despite the efforts, the prediction value of existing models is still limited, especially at regional and continental scale. A new approach for modelling soil erosion at large spatial scale is here proposed. It is based on the joint use of low data demanding models and innovative techniques for better estimating model inputs. The proposed modelling architecture has at its basis the semantic array programming paradigm and a strong effort towards computational reproducibility. An extended version of the Revised Universal Soil Loss Equation ({RUSLE}) has been implemented merging different empirical rainfall-erosivity equations within a climatic ensemble model and adding a new factor for a better consideration of soil stoniness within the model. {Pan-European} soil erosion rates by water have been estimated through the use of publicly available datasets and locally reliable empirical relationships. The accuracy of the results is corroborated by a visual plausibility check (63% of a random sample of grid cells are accurate, 83% at least moderately accurate, bootstrap p ≤ 0.05). A comparison with country level statistics of pre-existing European maps of soil erosion by water is also provided.} }
@article{boscoVisualValidationERUSLE2014, title = {Visual Validation of the E-{{RUSLE}} Model Applied at the Pan-{{European}} Scale}, author = {Bosco, Claudio and {de Rigo}, Daniele and Dewitte, Olivier}, year = {2014}, volume = {1}, pages = {mri11a13+}, doi = {10.6084/m9.figshare.844627}, abstract = {Validating soil erosion estimates at regional or larger scale is still extremely challenging. The common procedures are not technically and financially applicable for large spatial extents; despite this, some options are still applicable. For validating the European map of soil erosion by water calculated using the approach proposed in Bosco et al. [1], we applied alternative qualitative methods based on visual evaluation. The 1 km\textsuperscript{2} map was validated through a visual and categorical comparison between modelled and observed soil erosion. A procedure employing high-resolution Google Earth images and pictures as validation data is here shown. The resolution of the images, which has rapidly increased during the last years, allows for a visual qualitative estimation of local soil erosion rates. A cluster of 3x3 km\textsuperscript{2} around each of 85 selected points was analysed by the authors. The results corroborate the map obtained applying the e-RUSLE model: 63\,\% of a random sample of 732 grid cells are accurate, 83\,\% at least moderately accurate (bootstrap p {$\leq$} 0.05). For each of the 85 clusters, the complete details of the validation, also containing the comments of the evaluators and the geo-location of the analysed areas, have been reported.}, journal = {Scientific Topics Focus}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13340371,bootstrap,e-rusle,europe,featured-publication,land-cover,remote-sensing,rusle,scientific-topics-focus,semantic-array-programming,semap,soil-erosion,soil-resources,statistics,uncertainty,validation,visual-assessment,visual-interpretation}, lccn = {INRMM-MiD:c-13340371}, series = {Notes {{Transdiscipl}}. {{Model}}. {{Env}}.} }
@article{yemshanovThereNoSilver2014, title = {There Is No Silver Bullet: The Value of Diversification in Planning Invasive Species Surveillance}, author = {Yemshanov, Denys and Koch, Frank H. and Lu, Bo and Lyons, D. Barry and Prestemon, Jeffrey P. and Scarr, Taylor and Koehler, Klaus}, year = {2014}, month = aug, volume = {104}, pages = {61--72}, issn = {0921-8009}, doi = {10.1016/j.ecolecon.2014.04.024}, abstract = {[Highlights] [::] We consider short-term surveillance of an invasive pest in a diverse landscape. [::] Our case study is focused on the survey of emerald ash borer expansion in Canada. [::] The spread of the invader is described by distribution-model-based estimates. [::] A portfolio framework was applied to allocate resources for pest surveillance. [::] Diversification makes the survey less subject to errors in spread estimates. [Abstract] In this study we demonstrate how the notion of diversification can be used in broad-scale resource allocation for surveillance of invasive species. We consider the problem of short-term surveillance for an invasive species in a geographical environment. We find the optimal allocation of surveillance resources among multiple geographical subdivisions via application of a classical portfolio framework, which allocates investments among multiple financial asset types with uncertain returns in a portfolio that maximizes the performance and, by meeting the desired diversification targets, protects against errors in estimating the portfolio's performance. [] We illustrate the approach with a case study that applies a spatial transmission model to assess the risk of spread of the emerald ash borer (EAB), a significant pest in North America, with infested firewood that may be carried by visitors to campground facilities in central Canada. Adding the diversification objective yields an expected survey performance that is comparable with undiversified optimal allocation, but more importantly, makes the geographical distribution of survey priorities less subject to possible errors in the spread rate estimates. Overall, diversification of pest surveillance can be viewed as a viable short-term strategy for hedging against uncertainty in expert- and model-based assessments of pest invasion risk.}, journal = {Ecological Economics}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13199976,communicating-uncertainty,invasive-species,modelling-uncertainty,multi-scale,plant-pests,rapid-assessment,risk-assessment,robust-modelling,silver-bullet,uncertainty}, lccn = {INRMM-MiD:c-13199976} }
@article{zastrowCrisisMappersTurn2014, title = {Crisis Mappers Turn to Citizen Scientists}, author = {Zastrow, Mark}, year = {2014}, month = nov, volume = {515}, pages = {321}, issn = {0028-0836}, doi = {10.1038/515321a}, abstract = {Crowdsourced disaster surveys strive for more reliability in online collaboration. [Excerpt] When Typhoon Haiyan barrelled into the Philippines on 8 November 2013, more than 1,600 volunteers leapt to their laptops to make 4.5 million edits to OpenStreetMap, an online, open global map. Working from satellite imagery, the volunteers created maps for stricken areas of the islands, and tagged buildings that seemed to have been damaged or destroyed. The maps were used to help aid workers to navigate the terrain, and the damage assessments were passed to relief organizations to direct aid workers and supplies. Although the maps proved invaluable, the damage assessments were poor. ``The results were terrible,'' Dale Kunce, a geospatial engineer at the American Red Cross, told the International Conference of Crisis Mappers in New York City on the anniversary of Haiyan's landfall. Crisis mappers see the experience not as a setback but as a valuable lesson. The take-home message, Kunce said, ``is that if we'd done a couple things differently, the quality would have been much higher''.}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13433890,~to-add-doi-URL,assessment,citizen-science,citizen-sensor,crisis,data-uncertainty,modelling-uncertainty,rapid-assessment,uncertainty,visual-assessment}, lccn = {INRMM-MiD:c-13433890}, number = {7527} }
@article{glotterEvaluatingUtilityDynamical2014, title = {Evaluating the Utility of Dynamical Downscaling in Agricultural Impacts Projections}, author = {Glotter, Michael and Elliott, Joshua and McInerney, David and Best, Neil and Foster, Ian and Moyer, Elisabeth J.}, year = {2014}, month = jun, volume = {111}, pages = {8776--8781}, issn = {1091-6490}, doi = {10.1073/pnas.1314787111}, abstract = {Interest in estimating the potential socioeconomic costs of climate change has led to the increasing use of dynamical downscaling -- nested modeling in which regional climate models (RCMs) are driven with general circulation model (GCM) output -- to produce fine-spatial-scale climate projections for impacts assessments. We evaluate here whether this computationally intensive approach significantly alters projections of agricultural yield, one of the greatest concerns under climate change. Our results suggest that it does not. We simulate US maize yields under current and future CO2 concentrations with the widely used Decision Support System for Agrotechnology Transfer crop model, driven by a variety of climate inputs including two GCMs, each in turn downscaled by two RCMs. We find that no climate model output can reproduce yields driven by observed climate unless a bias correction is first applied. Once a bias correction is applied, GCM- and RCM-driven US maize yields are essentially indistinguishable in all scenarios ({$<$}10\,\% discrepancy, equivalent to error from observations). Although RCMs correct some GCM biases related to fine-scale geographic features, errors in yield are dominated by broad-scale (100s of kilometers) GCM systematic errors that RCMs cannot compensate for. These results support previous suggestions that the benefits for impacts assessments of dynamically downscaling raw GCM output may not be sufficient to justify its computational demands. Progress on fidelity of yield projections may benefit more from continuing efforts to understand and minimize systematic error in underlying climate projections.}, journal = {Proceedings of the National Academy of Sciences}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13208270,agricultural-resources,assessment,bias-correction,climate-change,climate-projections,downscaling,dynamic-downscaling,global-climate-models,modelling,modelling-uncertainty,prediction-bias,premature-optimization,regional-climate-models,uncertainty}, lccn = {INRMM-MiD:c-13208270}, number = {24} }
@article{joughinMarineIceSheet2014, title = {Marine Ice Sheet Collapse Potentially under Way for the {{Thwaites Glacier Basin}}, {{West Antarctica}}}, author = {Joughin, Ian and Smith, Benjamin E. and Medley, Brooke}, year = {2014}, month = may, volume = {344}, pages = {735--738}, issn = {1095-9203}, doi = {10.1126/science.1249055}, abstract = {The West Antarctic Ice Sheet (WAIS) is particularly vulnerable to ocean warming-induced collapse. The Thwaites Glacier of West Antarctica is one of the largest WAIS regional contributors to sea level rise, and has been considered to be potentially unstable for many years. Joughin et al. (p. 735) used a combination of a numerical model and observations of its recent geometry and movement to investigate the stability of the Thwaites Glacier. The glacier has already entered the early stages of collapse, and rapid and irreversible collapse is likely in the next 200 to 1000 years.}, journal = {Science}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13166867,antarctic-region,antarctic-sea-ice,climate-change,computational-science,instability,modelling,sea-level,uncertainty,west-antartica}, lccn = {INRMM-MiD:c-13166867}, number = {6185} }
@article{lindnerClimateChangeEuropean2014, title = {Climate Change and {{European}} Forests: What Do We Know, What Are the Uncertainties, and What Are the Implications for Forest Management?}, author = {Lindner, Marcus and Fitzgerald, Joanne B. and Zimmermann, Niklaus E. and Reyer, Christopher and Delzon, Sylvain and {van der Maaten}, Ernst and Schelhaas, Mart-Jan and Lasch, Petra and Eggers, Jeannette and {van der Maaten-Theunissen}, Marieke and Suckow, Felicitas and Psomas, Achilleas and Poulter, Benjamin and Hanewinkel, Marc}, year = {2014}, month = dec, volume = {146}, pages = {69--83}, issn = {0301-4797}, doi = {10.1016/j.jenvman.2014.07.030}, abstract = {[Highlights] [::] Uncertainty is inherent to climate change impact assessments. [::] Extreme events are only weakly represented in many assessments. [::] The range of possible impacts has so far been underestimated in most studies. [::] Some general trends are common to all climate projections. [::] Guidance is needed to interpret state-of-the-art knowledge and give helpful advice. [Abstract] The knowledge about potential climate change impacts on forests is continuously expanding and some changes in growth, drought induced mortality and species distribution have been observed. However despite a significant body of research, a knowledge and communication gap exists between scientists and non-scientists as to how climate change impact scenarios can be interpreted and what they imply for European forests. It is still challenging to advise forest decision makers on how best to plan for climate change as many uncertainties and unknowns remain and it is difficult to communicate these to practitioners and other decision makers while retaining emphasis on the importance of planning for adaptation.}, journal = {Journal of Environmental Management}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13354308,climate-change,duplicated-entry-to-be-removed,forest-management,forest-resources,habitat-suitability,niche-modelling,uncertainty}, lccn = {INRMM-MiD:c-13354308} }
@article{tmi2014b, author = {Andrews, Shawn and Changizi, Neda and Hamarneh, Ghassan}, journal = {IEEE Transactions on Medical Imaging (IEEE TMI)}, number = {9}, pages = {1890--1899}, title = {The Isometric Log-Ratio Transform for Probabilistic Multi-Label Anatomical Shape Representation}, volume = {33}, year = {2014}, doi = {10.1109/TMI.2014.2325596}, keywords = {Shape Modelling and Analysis, Segmentation, Uncertainty}, pdf = {http://www.cs.sfu.ca/~hamarneh/ecopy/tmi2014b.pdf} }
@article{ id = {eda5fb0a-1dc5-3703-9030-c61c4a44f622}, title = {The future viability of algae-derived biodiesel under economic and technical uncertainties}, type = {article}, year = {2014}, keywords = {Algae,Biodiesel,HDMR,Techno-economic assessment,Uncertainty}, created = {2015-04-08T12:25:35.000Z}, pages = {166-173}, volume = {151}, websites = {http://dx.doi.org/10.1016/j.biortech.2013.10.062}, publisher = {Elsevier Ltd}, file_attached = {false}, profile_id = {14c0bef2-6ca7-3c04-b2ac-61e1821d8cd9}, last_modified = {2015-04-08T21:21:29.000Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {Brownbridge2014}, abstract = {This study presents a techno-economic assessment of algae-derived biodiesel under economic and technical uncertainties associated with the development of algal biorefineries. A global sensitivity analysis was performed using a High Dimensional Model Representation (HDMR) method. It was found that, considering reasonable ranges over which each parameter can vary, the sensitivity of the biodiesel production cost to the key input parameters decreases in the following order: algae oil content > algae annual productivity per unit area > plant production capacity > carbon price increase rate. It was also found that the Return on Investment (ROI) is highly sensitive to the algae oil content, and to a lesser extent to the algae annual productivity, crude oil price and price increase rate, plant production capacity, and carbon price increase rate. For a large scale plant (100,000 tonnes of biodiesel per year) the production cost of biodiesel is likely to be £0.8-1.6 per kg.}, bibtype = {article}, author = {Brownbridge, George and Azadi, Pooya and Smallbone, Andrew and Bhave, Amit and Taylor, Benjamin and Kraft, Markus}, journal = {Bioresource Technology} }
@article{rodriguez-aserettoFreeOpenSource2013, title = {Free and {{Open Source Software}} Underpinning the {{European Forest Data Centre}}}, author = {{Rodriguez-Aseretto}, Dario and Di Leo, Margherita and {de Rigo}, Daniele and Corti, Paolo and McInerney, Daniel and Camia, Andrea and {San-Miguel-Ayanz}, Jes{\'u}s}, year = {2013}, volume = {15}, pages = {12101+}, issn = {1607-7962}, doi = {10.6084/m9.figshare.155700}, abstract = {Worldwide, governments are growingly focusing on free and open source software (FOSS) as a move toward transparency and the freedom to run, copy, study, change and improve the software. The European Commission (EC) is also supporting the development of FOSS [...]. In addition to the financial savings, FOSS contributes to scientific knowledge freedom in computational science (CS) and is increasingly rewarded in the science-policy interface within the emerging paradigm of open science. Since complex computational science applications may be affected by software uncertainty, FOSS may help to mitigate part of the impact of software errors by CS community-driven open review, correction and evolution of scientific code. The continental scale of EC science-based policy support implies wide networks of scientific collaboration. Thematic information systems also may benefit from this approach within reproducible integrated modelling. This is supported by the EC strategy on FOSS: ``for the development of new information systems, where deployment is foreseen by parties outside of the EC infrastructure, FOSS will be the preferred choice and in any case used whenever possible''. The aim of this contribution is to highlight how a continental scale information system may exploit and integrate FOSS technologies within the transdisciplinary research underpinning such a complex system. A European example is discussed where FOSS innervates both the structure of the information system itself and the inherent transdisciplinary research for modelling the data and information which constitute the system content. [...]}, journal = {Geophysical Research Abstracts}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-11988844,computational-science,data-transformation-modelling,environmental-modelling,europe,free-scientific-knowledge,free-scientific-software,free-software,gdal,geospatial,geospatial-semantic-array-programming,gis,gnu-octave,gnu-r,guidos-mspa,integrated-modelling,integrated-natural-resources-modelling-and-management,mastrave-modelling-library,modelling,modelling-uncertainty,numpy,open-science,pktools,python,robust-modelling,science-policy-interface,scipy,semantic-array-programming,semantics,semap,software-engineering,software-errors,software-uncertainty,system-engineering,uncertainty}, lccn = {INRMM-MiD:c-11988844}, series = {Geophysical {{Research Abstracts}}} }
@article{soderbergRisingPolicyConflicts2013, title = {Rising Policy Conflicts in {{Europe}} over Bioenergy and Forestry}, author = {S{\"o}derberg, Charlotta and Eckerberg, Katarina}, year = {2013}, month = aug, volume = {33}, pages = {112--119}, issn = {1389-9341}, doi = {10.1016/j.forpol.2012.09.015}, abstract = {[Highlights] [::] EU Bioenergy policy cuts across forest, agriculture, energy and transport sectors. [::] Increased pressure on forest biomass risks putting EU in a wood-deficit situation. [::] Bioenergy conflicts regard land use, biodiversity, climate and sustainability. [::] Conflicts on environmental consequences from bioenergy policy are reconcilable. [::] Conflicts on globally shared rights and responsibilities are not easily reconciled. [Abstract] Growing concerns over emissions of green-house gases causing climate change as well as energy security concerns have spurred the interest in bioenergy production pushed by EU targets to fulfil the goal of 20~per cent renewable energy in 2020, as well as the goal of 10~per cent renewable fuels in transport by 2020. Increased bioenergy production is also seen to have political and economic benefits for rural areas and farming regions in Europe and in the developing world. There are, however, conflicting views on the potential benefits of large scale bioenergy production, and recent debates have also drawn attention to a range of environmental and socio-economic issues that may arise in this respect. One of these challenges will be that of accommodating forest uses - including wood for energy, and resulting intensification of forest management - with biodiversity protection in order to meet EU policy goals. We note that the use of biomass and biofuels spans over several economic sector policy areas, which calls for assessing and integrating environmental concerns across forest, agriculture, energy and transport sectors. In this paper, we employ frame analysis to identify the arguments for promoting bioenergy and assess the potential policy conflicts in the relevant sectors, through the analytical lens of environmental policy integration. We conclude that while there is considerable leverage of environmental arguments in favour of bioenergy in the studied economic sectors, and potential synergies with other policy goals, environmental interest groups remain sceptical to just how bioenergy is currently being promoted. There is a highly polarised debate particularly relating to biofuel production. Based on our analysis, we discuss the potential for how those issues could be reconciled drawing on the frame conflict theory, distinguishing between policy disagreements and policy controversies.}, journal = {Forest Policy and Economics}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-11738844,~to-add-doi-URL,bioenergy,biomass,europe,forest-resources,ghg,science-policy-interface,uncertainty}, lccn = {INRMM-MiD:c-11738844} }
@article{ naranjo_using_2013, title = {Using heat as a tracer to estimate spatially distributed mean residence times in the hyporheic zone of a riffle-pool sequence}, issn = {1944-7973}, url = {http://dx.doi.org/10.1002/wrcr.20306}, doi = {10.1002/wrcr.20306}, journal = {Water Resources Research}, author = {Naranjo, Ramon C. and Pohll, Greg and Niswonger, Richard G. and Stone, Mark and Mckay, Alan}, year = {2013}, keywords = {1830 Groundwater/surface water interaction, 1832 Groundwater transport, 1847 Modeling, 1873 Uncertainty assessment, heat as a tracer, hyporheic zone, residence time, uncertainty}, pages = {n/a--n/a} }
@article{natureHiddenHeat2013, title = {Hidden Heat}, author = {{Nature}}, year = {2013}, month = aug, volume = {500}, pages = {501}, issn = {0028-0836}, doi = {10.1038/500501a}, abstract = {Scientists are homing in on the reasons for the current hiatus in global warming, but all must recognize that the long-term risk of warming from carbon dioxide remains high.}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12601813,climate,climate-projections,global-scale,global-warming,modelling-uncertainty,temperature,uncertainty}, lccn = {INRMM-MiD:c-12601813}, number = {7464} }
@incollection{esfahaniUncertaintySelfAdaptiveSoftware2013, title = {Uncertainty in {{Self}}-{{Adaptive Software Systems}}}, booktitle = {Lecture {{Notes}} in {{Computer Science}}}, author = {Esfahani, Naeem and Malek, Sam}, editor = {Lemos, Rog{\'e}rio and Giese, Holger and M{\"u}ller, Hausi A. and Shaw, Mary}, year = {2013}, volume = {7475}, pages = {214--238}, publisher = {{Springer Berlin Heidelberg}}, issn = {0302-9743}, doi = {10.1007/978-3-642-35813-5\_9}, abstract = {The ever-growing complexity of software systems coupled with their stringent availability requirements are challenging the manual management of software after its deployment. This has motivated the development of self-adaptive software systems. Self-adaptation endows a software system with the ability to satisfy certain objectives by automatically modifying its behavior at runtime. While many promising approaches for the construction of self-adaptive software systems have been developed, the majority of them ignore the uncertainty underlying the adaptation. This has been one of the key inhibitors to widespread adoption of self-adaption techniques in risk-averse real-world applications. Uncertainty in this setting is a vaguely understood term. In this paper, we characterize the sources of uncertainty in self-adaptive software system, and demonstrate its impact on the system's ability to satisfy its objectives. We then provide an alternative notion of optimality that explicitly incorporates the uncertainty underlying the knowledge (models) used for decision making. We discuss the state-of-the-art for dealing with uncertainty in this setting, and conclude with a set of challenges, which provide a road map for future research.}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12272697,adaptive-control,computational-science,homeostasis,model-drift,optimisation,self-adaptive-systems,software-errors,software-uncertainty,uncertainty}, lccn = {INRMM-MiD:c-12272697}, series = {Lecture {{Notes}} in {{Computer Science}}} }
@article{hannartDisconcertingLearningClimate2013, title = {Disconcerting Learning on Climate Sensitivity and the Uncertain Future of Uncertainty}, author = {Hannart, Alexis and Ghil, Michael and Dufresne, Jean-Louis and Naveau, Philippe}, year = {2013}, volume = {119}, pages = {585--601}, issn = {1573-1480}, doi = {10.1007/s10584-013-0770-z}, abstract = {How will our estimates of climate uncertainty evolve in the coming years, as new learning is acquired and climate research makes further progress? As a tentative contribution to this question, we argue here that the future path of climate uncertainty may itself be quite uncertain, and that our uncertainty is actually prone to increase even though we learn more about the climate system. We term disconcerting learning this somewhat counter-intuitive process in which improved knowledge generates higher uncertainty. After recalling some definitions, this concept is connected with the related concept of negative learning that was introduced earlier by Oppenheimer et al. (Clim Change 89:155-172, 2008). We illustrate disconcerting learning on several real-life examples and characterize mathematically certain general conditions for its occurrence. We show next that these conditions are met in the current state of our knowledge on climate sensitivity, and illustrate this situation based on an energy balance model of climate. We finally discuss the implications of these results on the development of adaptation and mitigation policy.}, journal = {Climatic Change}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12515508,climate-change,communicating-uncertainty,disconcerting-learning,negative-learning,non-linearity,progressive-learning,reassuring-learning,science-policy-interface,scientific-communication,statistics,surprise,uncertainty,unknown}, lccn = {INRMM-MiD:c-12515508}, number = {3-4} }
@article{caballeroStatedependentClimateSensitivity2013, title = {State-Dependent Climate Sensitivity in Past Warm Climates and Its Implications for Future Climate Projections}, author = {Caballero, Rodrigo and Huber, Matthew}, year = {2013}, month = aug, volume = {110}, pages = {14162--14167}, issn = {1091-6490}, doi = {10.1073/pnas.1303365110}, abstract = {Projections of future climate depend critically on refined estimates of climate sensitivity. Recent progress in temperature proxies dramatically increases the magnitude of warming reconstructed from early Paleogene greenhouse climates and demands a close examination of the forcing and feedback mechanisms that maintained this warmth and the broad dynamic range that these paleoclimate records attest to. Here, we show that several complementary resolutions to these questions are possible in the context of model simulations using modern and early Paleogene configurations. We find that (i) changes in boundary conditions representative of slow ``Earth system'' feedbacks play an important role in maintaining elevated early Paleogene temperatures, (ii) radiative forcing by carbon dioxide deviates significantly from pure logarithmic behavior at concentrations relevant for simulation of the early Paleogene, and (iii) fast or ``Charney'' climate sensitivity in this model increases sharply as the climate warms. Thus, increased forcing and increased slow and fast sensitivity can all play a substantial role in maintaining early Paleogene warmth. This poses an equifinality problem: The same climate can be maintained by a different mix of these ingredients; however, at present, the mix cannot be constrained directly from climate proxy data. The implications of strongly state-dependent fast sensitivity reach far beyond the early Paleogene. The study of past warm climates may not narrow uncertainty in future climate projections in coming centuries because fast climate sensitivity may itself be state-dependent, but proxies and models are both consistent with significant increases in fast sensitivity with increasing temperature.}, journal = {Proceedings of the National Academy of Sciences}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12661290,climate-change,climate-projections,earth-system,environmental-modelling,feedback,modelling,modelling-uncertainty,paleo-climate,prediction-bias,temperature,uncertainty}, lccn = {INRMM-MiD:c-12661290}, number = {35} }
@article{ title = {Interactions between transient and sustained neural signals support the generation and regulation of anxious emotion}, type = {article}, year = {2013}, keywords = {BNST,amygdala,emotion,fMRI,insula,intolerance of uncertainty,unpredictability}, pages = {49-60}, volume = {23}, websites = {http://www.cercor.oxfordjournals.org/cgi/doi/10.1093/cercor/bhr373,http://www.ncbi.nlm.nih.gov/pubmed/22250290}, month = {1}, day = {16}, id = {b98afb7e-b571-32ac-b5ed-c297712938f0}, created = {2012-01-17T16:53:51.000Z}, accessed = {2013-01-28}, file_attached = {true}, profile_id = {cf0b444c-22b5-3480-a8e8-61f273e9a654}, last_modified = {2015-11-07T19:51:31.000Z}, read = {true}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, abstract = {Anxious emotion can manifest on brief (threat response) and/or persistent (chronic apprehension and arousal) timescales, and prior work has suggested that these signals are supported by separable neural circuitries. This fMRI study utilized a mixed block-event-related emotional provocation paradigm in 55 healthy participants to simultaneously measure brief and persistent anxious emotional responses, testing the specificity of, and interactions between, these potentially distinct systems. Results indicated that components of emotional processing networks were uniquely sensitive to transient and sustained anxious emotion. Whereas the amygdala and midbrain showed only transient responses, the ventral basal forebrain and anterior insula showed sustained activity during extended emotional contexts that tracked positively with task-evoked anxiety. States of lesser anxiety were associated with greater sustained activity in the ventromedial prefrontal cortex. Furthermore, ventromedial prefrontal recruitment was lower in individuals with higher scores on intolerance of uncertainty measures, and this hyporecruitment predicted greater transient amygdala responding to potential threat cues. This work demonstrates how brain circuitries interact across temporal scales to support brief and persistent anxious emotion and suggests potentially divergent mechanisms of dysregulation in clinical syndromes marked by brief versus persistent symptoms of anxiety.}, bibtype = {article}, author = {Somerville, Leah H and Wagner, Dylan D. and Wig, Gagan S. and Moran, Joseph M. and Whalen, Paul J. and Kelley, William M.}, journal = {Cerebral Cortex}, number = {1} }
@inproceedings{TUW-218372, author = {Navratil, Gerhard and Frank, Andrew U.}, title = {VGI for Land Administration - A Quality Perspective}, booktitle = {8th International Symposium on Spatial Data Quality}, year = {2013}, editor = {Wu, B. and Guilbert, E. and Shi, Wenzhong}, publisher = {ISPRS Archives}, address = {XL-2/W1, 2013}, numpages = {5}, eid = {159}, keywords = {Crowd-sourcing, Location check-in, Quality analysis, Spatial registration, Uncertainty, Error distribution}, note = {Talk: 8th International Symposium on Spatial Data Quality, Hong Kong; 2013-05-30 -- 2013-06-01} }
@article{huntingfordNoIncreaseGlobal2013, title = {No Increase in Global Temperature Variability despite Changing Regional Patterns}, author = {Huntingford, Chris and Jones, Philip D. and Livina, Valerie N. and Lenton, Timothy M. and Cox, Peter M.}, year = {2013}, month = jul, volume = {500}, pages = {327--330}, issn = {0028-0836}, doi = {10.1038/nature12310}, abstract = {Evidence from Greenland ice cores shows that year-to-year temperature variability was probably higher in some past cold periods, but there is considerable interest in determining whether global warming is increasing climate variability at present. This interest is motivated by an understanding that increased variability and resulting extreme weather conditions may be more difficult for society to adapt to than altered mean conditions. So far, however, in spite of suggestions of increased variability, there is considerable uncertainty as to whether it is occurring. Here we show that although fluctuations in annual temperature have indeed shown substantial geographical variation over the past few decades, the time-evolving standard deviation of globally averaged temperature anomalies has been stable. A feature of the changes has been a tendency for many regions of low variability to experience increases, which might contribute to the perception of increased climate volatility. The normalization of temperature anomalies creates the impression of larger relative overall increases, but our use of absolute values, which we argue is a more appropriate approach, reveals little change. Regionally, greater year-to-year changes recently occurred in much of North America and Europe. Many climate models predict that total variability will ultimately decrease under high greenhouse gas concentrations, possibly associated with reductions in sea-ice cover. Our findings contradict the view that a warming world will automatically be one of more overall climatic variation.}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12527591,climate-extremes,complexity,global-scale,global-warming,multi-scale,non-linearity,regional-scale,spatial-pattern,temperature,uncertainty}, lccn = {INRMM-MiD:c-12527591}, number = {7462} }
@article{fyfeOverestimatedGlobalWarming2013, title = {Overestimated Global Warming over the Past 20 Years}, author = {Fyfe, John C. and Gillett, Nathan P. and Zwiers, Francis W.}, year = {2013}, month = sep, volume = {3}, pages = {767--769}, issn = {1758-678X}, doi = {10.1038/nclimate1972}, abstract = {Recent observed global warming is significantly less than that simulated by climate models. This difference might be explained by some combination of errors in external forcing, model response and internal climate variability.}, journal = {Nature Clim. Change}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12600607,climate-change,climate-projections,extrapolation-error,global-warming,modelling-uncertainty,prediction-bias,uncertainty}, lccn = {INRMM-MiD:c-12600607}, number = {9} }
@article{jagodicNurtureYourScientific2013, title = {Nurture Your Scientific Curiosity Early in Your Research Career}, author = {Jagodic, Maja and Stridh, Pernilla and Gad, Annica K. B. and Paine, Ananta and Udekwu, Klas I. and Sjoholm, Louise K. and Svensson, Mattias and {Pan-Hammarstrom}, Qiang}, year = {2013}, month = jan, volume = {45}, pages = {116--118}, issn = {1061-4036}, doi = {10.1038/ng.2527}, abstract = {Uncertainty makes scientific research challenging and at the same time exciting. Whereas curiosity and passion for uncovering the unknown drive future generations of researchers, the landscape of science has changed. We investigated whether the requirements for having a successful research career are changing, and whether junior researchers are aware of these requirements. Structured discussion with peers and more experienced researchers can point the way forward to an excellent career.}, journal = {Nature Genetics}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-11975747,complexity,cooperation,research-management,research-metrics,research-team-size,science-ethics,serendipity,transdisciplinary-research,uncertainty}, lccn = {INRMM-MiD:c-11975747}, number = {2} }
@article{nunezInclusionSoilErosion2013, title = {Inclusion of Soil Erosion Impacts in Life Cycle Assessment on a Global Scale: Application to Energy Crops in {{Spain}}}, author = {N{\'u}{\~n}ez, Montserrat and Ant{\'o}n, Assumpci{\'o} and Mu{\~n}oz, Pere and Rieradevall, Joan}, year = {2013}, volume = {18}, pages = {755--767}, issn = {1614-7502}, doi = {10.1007/s11367-012-0525-5}, abstract = {[Purpose] Despite the fundamental role of ecosystem goods and services in sustaining human activities, there is no harmonized and internationally agreed method for including them in life cycle assessment (LCA). The main goal of this study was to develop a globally applicable and spatially resolved method for assessing land use impacts on the erosion regulation ecosystem service. [Methods] Soil erosion depends much on location. Thus, unlike conventional LCA, the endpoint method was regionalized at the grid cell level (5~arcmin, approximately 10\,\texttimes\,10~km\textsuperscript{2}) to reflect the spatial conditions of the site. Spatially explicit characterization factors were not further aggregated at broader spatial scales. [Results and discussion] Life cycle inventory data of topsoil and topsoil organic carbon (SOC) losses were interpreted at the endpoint level in terms of the ultimate damage to soil resources and ecosystem quality. Human health damages were excluded from the assessment. The method was tested on a case study of five 3-year agricultural rotations, two of them with energy crops, grown in several locations in Spain. A large variation in soil and SOC losses was recorded in the inventory step, depending on climatic and edaphic conditions. The importance of using a spatially explicit model and characterization factors is shown in the case study. [Conclusions] The regionalized assessment takes into account the differences in soil erosion-related environmental impacts caused by the great variability of soils. Taking this regionalized framework as the starting point, further research should focus on testing the applicability of the method through the complete life cycle of a product and on determining an appropriate spatial scale at which to aggregate characterization factors in order to deal with data gaps on the location of processes, especially in the background system. Additional research should also focus on improving the reliability of the method by quantifying and, insofar as it is possible, reducing uncertainty.}, journal = {The International Journal of Life Cycle Assessment}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-12037040,assessment,ecosystem-services,global-scale,soil-erosion,soil-resources,uncertainty,vegetation}, lccn = {INRMM-MiD:c-12037040}, number = {4} }
@article{seebachChoiceForestMap2012, title = {Choice of Forest Map Has Implications for Policy Analysis: A Case Study on the {{EU}} Biofuel Target}, author = {Seebach, Lucia and McCallum, Ian and Fritz, Steffen and Kindermann, Georg and Leduc, Sylvain and B{\"o}ttcher, Hannes and Fuss, Sabine}, year = {2012}, month = oct, volume = {22}, pages = {13--24}, issn = {1462-9011}, doi = {10.1016/j.envsci.2012.04.010}, abstract = {With the increasing availability of European and global forest maps, users are facing the difficult choice to select the most appropriate map for their purposes. Many of these maps are potential input datasets for forest-related applications for the European Union (EU), due to their spatial extent and harmonised approach at the European level. However, they possess different characteristics in terms of spatial detail or thematic accuracy. Little attention has been paid to the effect of these characteristics on simulation models and the resultant policy implications. In this study we tested whether the choice of a forest map has substantial influence on model output, i.e. if output differences can be related to the input differences. A sensitivity analysis of the spatially explicit Global Forest Model (G4M) was performed using four different forest maps: the pan-European high resolution forest/non-forest map (FMAP), the Corine Land Cover (CLC), the Calibrated European Forest Map (CEFM) and the Global Land Cover (GLC). Finally, the impact of potential differences owing to input datasets on decision-making was tested in a selected case study: reaching the EU 10\,\% biofuel target through enhanced utilization of forest biomass. The sensitivity analysis showed that the choice of the forest cover map has a major influence on the model outputs in particular at the country-level, while having less influence at the EU27 level. Differences between the input datasets are strongly reflected in the outputs. Similarly, depending on the choice of the input alternate options for decision-making were found within the hypothesized biofuel target (case study), demonstrating a substantial value of information. In general, it was demonstrated that input maps are the major driver of decision-making if forest resource outputs of the model are their basis. Improvement of the input forest map would result in immediate benefit for a better decision-making basis. [Highlights] [::] Sensitivity analysis of a simulation model showed strong influence of forest maps. [::] Strongest effect on model outputs at country-level, less influence at EU27 level. [::] An EU biofuel case study found similar effect of forest maps on decision-making. [::] Importance of careful choice of forest input maps for models is highlighted. [::] Map improvement brings immediate benefit to decision-making policy process.}, journal = {Environmental Science \& Policy}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10900721,bioenergy,communicating-uncertainty,environmental-modelling,europe,forest-resources,mapping,modelling,science-based-decision-making,science-policy-interface,uncertainty}, lccn = {INRMM-MiD:c-10900721} }
@article{dimaioUncertaintyOptimalLevel2012, title = {Uncertainty and the Optimal Level of Specialization}, author = {Di Maio, Michele and Valente, Marco}, year = {2012}, month = sep, volume = {66}, pages = {213--218}, issn = {1090-9443}, doi = {10.1016/j.rie.2012.04.001}, abstract = {Using a two-sector one-factor comparative-advantage-based trade model under uncertainty, we show that (1) to specialize according to comparative advantages may be sub-optimal in a multi-period setting; (2) there are conditions under which, even if agents are risk-neutral, the decentralized solution is inefficient and characterized by overspecialization. [Highlights] [::] Comparative-advantage-induced full specialization may be sub-optimal in a multi-period setting. [::] The centralized and the decentralized optimal level of specialization are different in a multi-period setting. [::] The decentralized optimal level of specialization is inefficient and characterized by overspecialization.}, journal = {Research in Economics}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10618916,competition,economics,local-over-complication,overspecialization,uncertainty}, lccn = {INRMM-MiD:c-10618916}, number = {3} }
@inproceedings{tully_constrained_2012, title = {Constrained filtering with contact detection data for the localization and registration of continuum robots in flexible environments}, doi = {10.1109/ICRA.2012.6225080}, abstract = {This paper presents a novel filtering technique that uses contact detection data and environmental stiffness estimates to register and localize a robot with respect to an a priori 3D surface model. The algorithm leverages geometric constraints within a Kalman filter framework and relies on two distinct update procedures: 1) an equality constrained step for when the robot is forcefully contacting the environment, and 2) an inequality constrained step for when the robot lies in the free-space of the environment. This filtering procedure registers the robot by incrementally eliminating probabilistically infeasible state space regions until a high likelihood solution emerges. In addition to registration and localization, the algorithm can estimate the deformation of the surface model and can detect false positives with respect to contact estimation. This method is experimentally evaluated with an experiment involving a continuum robot interacting with a bench-top flexible structure. The presented algorithm produces an experimental error in registration (with respect to the end-effector position) of 1.1 mm, which is less than 0.8 percent of the robot length.}, booktitle = {2012 {IEEE} {International} {Conference} on {Robotics} and {Automation}}, author = {Tully, S. and Bajo, A. and Kantor, G. and Choset, H. and Simaan, N.}, month = may, year = {2012}, keywords = {3D surface model, Estimation, Kalman filter framework, Kalman filters, Robot kinematics, Robot sensing systems, Uncertainty, Vectors, bench-top flexible structure, constrained filtering, contact detection data, contact estimation, continuum robot localization, continuum robot registration, end effectors, end-effector position, equality constrained step, filtering theory, flexible environments, geometric constraints, medical robotics, solid modelling, surface model deformation estimation}, pages = {3388--3394} }
@article{tarasov_data-calibrated_2012, title = {A data-calibrated distribution of deglacial chronologies for the {North} {American} ice complex from glaciological modeling}, volume = {315-316}, issn = {0012-821X}, doi = {10.1016/j.epsl.2011.09.010}, abstract = {Past deglacial ice sheet reconstructions have generally relied upon discipline-specific constraints with no attention given to the determination of objective confidence intervals. Reconstructions based on geophysical inversion of relative sea level (RSL) data have the advantage of large sets of proxy data but lack ice-mechanical constraints. Conversely, reconstructions based on dynamical ice sheet models are glaciologically self-consistent, but depend on poorly constrained climate forcings and sub-glacial processes. As an example of a much better constrained methodology that computes explicit error bars, we present a distribution of high-resolution glaciologically-self-consistent deglacial histories for the North American ice complex calibrated against a large set of RSL, marine limit, and geodetic data. The history is derived from ensemble-based analyses using the 3D MUN glacial systems model and a high-resolution ice-margin chronology derived from geological and geomorphological observations. Isostatic response is computed with the VM5a viscosity structure. Bayesian calibration of the model is carried out using Markov Chain Monte Carlo methods in combination with artificial neural networks trained to the model results. The calibration provides a posterior distribution for model parameters (and thereby modeled glacial histories) given the observational data sets that takes data uncertainty into account. Final ensemble results also account for fits between computed and observed strandlines and marine limits. Given the model (including choice of calibration parameters), input and constraint data sets, and VM5a earth rheology, we find the North American contribution to mwp1a was likely between 9.4 and 13.2 m eustatic over a 500 year interval. This is more than half of the total 16 to 26 m meltwater pulse over 500 to 700 years (with lower values being more probable) indicated by the Barbados coral record (Fairbanks, 1989; Peltier and Fairbanks, 2006) if one assumes a 5 meter living range for the Acropora Palmata coral. 20 ka ice volume for North America was likely 70.1 ± 2.0 m eustatic, or about 60\% of the total contribution to eustatic sea level change. We suspect that the potentially most critical unquantified uncertainties in our analyses are those related to model structure (especially climate forcing), deglacial ice margin chronology, and earth rheology.}, journal = {Earth and Planetary Science Letters}, author = {Tarasov, Lev and Dyke, Arthur S and Neal, Radford M and Peltier, W Richard}, year = {2012}, keywords = {Glacial model, Ice sheet reconstruction, Laurentide deglaciation, Meltwater pulse, Model calibration, Uncertainty}, pages = {30--40}, }
@article{whiteValueCoordinatedManagement2012, title = {The Value of Coordinated Management of Interacting Ecosystem Services}, author = {White, Crow and Costello, Christopher and Kendall, Bruce E. and Brown, Christopher J.}, year = {2012}, month = jun, volume = {15}, pages = {509--519}, issn = {1461-0248}, doi = {10.1111/j.1461-0248.2012.01773.x}, abstract = {Coordinating decisions and actions among interacting sectors is a critical component of ecosystem-based management, but uncertainty about coordinated management's effects is compromising its perceived value and use. We constructed an analytical framework for explicitly calculating how coordination affects management decisions, ecosystem state and the provision of ecosystem services in relation to ecosystem dynamics and socio-economic objectives. The central insight is that the appropriate comparison strategy to optimal coordinated management is optimal uncoordinated management, which can be identified at the game theoretic Nash equilibrium. Using this insight we can calculate coordination's effects in relation to uncoordinated management and other reference scenarios. To illustrate how this framework can help identify ecosystem and socio-economic conditions under which coordination is most influential and valuable, we applied it to a heuristic case study and a simulation model for the California Current Marine Ecosystem. Results indicate that coordinated management can more than double an ecosystem's societal value, especially when sectors can effectively manipulate resources that interact strongly. However, societal gains from coordination will need to be reconciled with observations that it also leads to strategic simplification of the ecological food web, and generates both positive and negative impacts on individual sectors and non-target species.}, journal = {Ecology Letters}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10562193,cross-disciplinary-perspective,ecology,ecosystem-services,integrated-natural-resources-modelling-and-management,integration-techniques,multi-objective-planning,uncertainty}, lccn = {INRMM-MiD:c-10562193}, number = {6} }
@book{fortmann-roeUnderstandingBiasvarianceTradeoff2012, title = {Understanding the Bias-Variance Tradeoff}, author = {{Fortmann-Roe}, Scott}, year = {2012}, abstract = {When we discuss prediction models, prediction errors can be decomposed into two main subcomponents we care about: error due to "bias" and error due to "variance". There is a tradeoff between a model's ability to minimize bias and variance. Understanding these two types of error can help us diagnose model results and avoid the mistake of over- or under-fitting.}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13849984,data-uncertainty,featured-publication,modelling-uncertainty,overfitting,prediction,prediction-bias,trade-offs,uncertainty,underfitting}, lccn = {INRMM-MiD:c-13849984}, series = {Essays} }
@article{thuillerConsequencesClimateChange2011, title = {Consequences of Climate Change on the Tree of Life in {{Europe}}}, author = {Thuiller, Wilfried and Lavergne, Sebastien and Roquet, Cristina and Boulangeat, Isabelle and Lafourcade, Bruno and Araujo, Miguel}, year = {2011}, month = feb, volume = {470}, pages = {531--534}, issn = {0028-0836}, doi = {10.1038/nature09705}, abstract = {Many species are projected to become vulnerable to twenty-first-century climate changes, with consequent effects on the tree of life. If losses were not randomly distributed across the tree of life, climate change could lead to a disproportionate loss of evolutionary history. Here we estimate the consequences of climate change on the phylogenetic diversities of plant, bird and mammal assemblages across Europe. Using a consensus across ensembles of forecasts for 2020, 2050 and 2080 and high-resolution phylogenetic trees, we show that species vulnerability to climate change clusters weakly across phylogenies. Such phylogenetic signal in species vulnerabilities does not lead to higher loss of evolutionary history than expected with a model of random extinctions. This is because vulnerable species have neither fewer nor closer relatives than the remaining clades. Reductions in phylogenetic diversity will be greater in southern Europe, and gains are expected in regions of high latitude or altitude. However, losses will not be offset by gains and the tree of life faces a trend towards homogenization across the continent.}, journal = {Nature}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-8834184,climate-change,communicating-uncertainty,consensus,ensemble,median,modelling,uncertainty}, lccn = {INRMM-MiD:c-8834184}, number = {7335} }
@article{Arlo-costa2011, author = {Arl\'{o}-Costa, H. and Helzner, Jeffrey}, journal = {ISIPTA}, keywords = {description, descriptive, experience, normative, uncertainty}, title = {{The Description / Experience Gap in the Case of Uncertainty}}, year = {2011} }
@article{ title = {Optimizing clinical environments for knowledge translation: strategies for nursing leaders.}, type = {article}, year = {2011}, keywords = {Evidence-Based Nursing,Humans,Models, Nursing,Models, Organizational,Nurse Administrators,Nursing, Supervisory,Organizational Culture,Patient Care Team,evidence based nursing,human,leadership,methodology,model,nonbiological model,nurse administrator,nursing,nursing research,organization,organization and management,patient care,psychological aspect,review,social environment,uncertainty}, pages = {73-85}, volume = {24}, websites = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84857378230&partnerID=40&md5=01153990c31273801d8559dc71b43d25}, id = {dd7d9064-764b-3047-b4c2-a2fbc91d7611}, created = {2016-08-21T22:17:39.000Z}, file_attached = {false}, profile_id = {217ced55-4c79-38dc-838b-4b5ea8df5597}, group_id = {408d37d9-5f1b-3398-a9f5-5c1a487116d4}, last_modified = {2017-03-14T09:54:45.334Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, source_type = {JOUR}, folder_uuids = {028056a6-dab5-46a4-b9bf-02542e7cfa2b}, private_publication = {false}, abstract = {Using findings from our recent study that found that a context of uncertainty in the work environment hindered nurses' research utilization, we suggest strategies for nurse managers and leaders to optimize clinical environments and support efforts to put research into clinical practice (knowledge translation). Two important sources of uncertainty were the complexity of teamwork and inconsistency in management and leadership styles. To reduce the uncertainty arising from teamwork, we propose (a) clarifying nurses' scopes of practice, (b) increasing knowledge sharing through supporting journal clubs and enhanced computer access and (c) creating safe venues for multidisciplinary dialogue. To reduce uncertainty arising from variations in management and leadership, we propose (a) developing policies that enhance the consistency of leadership and clarify the strategic direction of the management team, (b) clearly communicating those policies to nurses and (c) providing explicit rationales for treatment changes. Small, incremental steps can be taken to realize substantive changes in clinical environments in order to optimize nursing work environments for knowledge translation.}, bibtype = {article}, author = {Scott, S D and VandenBeld, B and Cummings, G G}, journal = {Nursing leadership (Toronto, Ont.)}, number = {3} }
@article{seebachIdentifyingStrengthsLimitations2011, title = {Identifying Strengths and Limitations of Pan-{{European}} Forest Cover Maps through Spatial Comparison}, author = {Seebach, Lucia M. and Strobl, Peter and {San-Miguel-Ayanz}, Jes{\'u}s and {Bastrup-Birk}, Annemarie}, year = {2011}, month = oct, volume = {25}, pages = {1865--1884}, issn = {1362-3087}, doi = {10.1080/13658816.2011.562211}, abstract = {Detailed and harmonized information on spatial forest distribution is an essential input for forest-related environmental assessments, in particular, for biomass and growing stock modeling. In the last years, several mapping approaches have been developed in order to provide such information for Europe in a harmonized way. Each of these maps exhibits particular properties and varies in accuracy. Yet, they are often used in parallel for different modeling purposes. A detailed spatial comparison seemed necessary in order to provide information on the advantages and limitations of each of these forest cover maps in order to facilitate their selection for modeling purposes. This article confronts the high-resolution forest cover map recently developed by the Joint Research Centre for the year 2000 (FMAP2000) with previously existing maps for the same time period: the CORINE Land Cover 2000 (CLC2000) and the Calibrated European Forest Map 1996 (CEFM1996). The spatial comparison of these three maps was carried out based on forest proportion maps of 1 km derived from the original maps. To characterize differences according to biogeographic regions, two criteria were used: detail of thematic content within each map and local spatial agreement. Concerning thematic content, CLC2000 displayed a surfeit of non-forested areas at the cost of low forest proportions, while FMAP2000 showed a more balanced distribution likely to preserve more detail in forest spatial pattern. Good spatial agreement was found for CLC2000 and FMAP2000 within about 70\,\% of the study area, while only 50\,\% agreement was found when compared with CEFM1996. The largest spatial differences between all maps were found in the Alpine and Mediterranean regions. Reasons for these might be different input data and classification techniques and, in particular, the calibration of CEFM1996 to reported national statistics.}, journal = {International Journal of Geographical Information Science}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13028432,accuracy,assessment,comparison,europe,forest-resources,land-cover,mapping,modelling-uncertainty,spatial-analysis,uncertainty}, lccn = {INRMM-MiD:c-13028432}, number = {11} }
@article{brugnachMoreNotAlways2011, title = {More Is Not Always Better: Coping with Ambiguity in Natural Resources Management}, author = {Brugnach, M. and Dewulf, A. and Henriksen, H. J. and {van der Keur}, P.}, year = {2011}, month = jan, volume = {92}, pages = {78--84}, issn = {0301-4797}, doi = {10.1016/j.jenvman.2010.08.029}, abstract = {Coping with ambiguities in natural resources management has become unavoidable. Ambiguity is a distinct type of uncertainty that results from the simultaneous presence of multiple valid, and sometimes conflicting, ways of framing a problem. As such, it reflects discrepancies in meanings and interpretations. Under the presence of ambiguity it is not clear what problem is to be solved, who should be involved in the decision processes or what is an appropriate course of action. Despite the extensive literature about methodologies and tools to deal with uncertainty, not much has been said about how to handle ambiguities. In this paper, we discuss the notions of framing and ambiguity, and we identify five broad strategies to handle it: rational problem solving, persuasion, dialogical learning, negotiation and opposition. We compare these approaches in terms of their assumptions, mechanisms and outcomes and illustrate each approach with a number of concrete methods.}, journal = {Journal of Environmental Management}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-7940628,ambiguity,comparison,natural-resources-interactions,science-based-decision-making,semantics,uncertainty}, lccn = {INRMM-MiD:c-7940628}, number = {1} }
@article{simmonsFalsePositivePsychology2011, title = {False-Positive Psychology}, author = {Simmons, Joseph P. and Nelson, Leif D. and Simonsohn, Uri}, year = {2011}, month = nov, volume = {22}, pages = {1359--1366}, issn = {1467-9280}, doi = {10.1177/0956797611417632}, pmid = {22006061}, url = {http://mfkp.org/INRMM/article/9928940}, publisher = {SAGE Publications}, abstract = {In this article, we accomplish two things. First, we show that despite empirical psychologists' nominal endorsement of a low rate of false-positive findings (≤ .05), flexibility in data collection, analysis, and reporting dramatically increases actual false-positive rates. In many cases, a researcher is more likely to falsely find evidence that an effect exists than to correctly find evidence that it does not. We present computer simulations and a pair of actual experiments that demonstrate how unacceptably easy it is to accumulate (and report) statistically significant evidence for a false hypothesis. Second, we suggest a simple, low-cost, and straightforwardly effective disclosure-based solution to this problem. The solution involves six concrete requirements for authors and four guidelines for reviewers, all of which impose a minimal burden on the publication process. [Excerpt: Requirements for authors] We propose the following six requirements for authors. [::] Authors must decide the rule for terminating data collection before data collection begins and report this rule in the article [...] [::] Authors must collect at least 20 observations per cell or else provide a compelling cost-of-data-collection justification [...] [::] Authors must list all variables collected in a study. [...] [::] Authors must report all experimental conditions, including failed manipulations. [...] [::] If observations are eliminated, authors must also report what the statistical results are if those observations are included. [...] [::] If an analysis includes a covariate, authors must report the statistical results of the analysis without the covariate. [...] [Guidelines for reviewers] We propose the following four guidelines for reviewers. [::] Reviewers should ensure that authors follow the requirements. [...] [::] Reviewers should be more tolerant of imperfections in results. [...] [::] Reviewers should require authors to demonstrate that their results do not hinge on arbitrary analytic decisions. [...] [::] If justifications of data collection or analysis are not compelling, reviewers should require the authors to conduct an exact replication. [...] [Concluding Remarks] Our goal as scientists is not to publish as many articles as we can, but to discover and disseminate truth. Many of us—and this includes the three authors of this article—often lose sight of this goal, yielding to the pressure to do whatever is justifiable to compile a set of studies that we can publish. This is not driven by a willingness to deceive but by the self-serving interpretation of ambiguity, which enables us to convince ourselves that whichever decisions produced the most publishable outcome must have also been the most appropriate. This article advocates a set of disclosure requirements that imposes minimal costs on authors, readers, and reviewers. These solutions will not rid researchers of publication pressures, but they will limit what authors are able to justify as acceptable to others and to themselves. We should embrace these disclosure requirements as if the credibility of our profession depended on them. Because it does.}, journal = {Psychological Science}, keywords = {check-list,cognitive-biases,communicating-uncertainty,false-positive,p-value,psychology,science-ethics,statistics,uncertainty,validation}, number = {11} }
@article{mcfadden_evaluating_2011, title = {Evaluating the efficacy of adaptive management approaches: {Is} there a formula for success?}, volume = {92}, issn = {0301-4797}, shorttitle = {Evaluating the efficacy of adaptive management approaches}, url = {http://www.sciencedirect.com/science/article/B6WJ7-51M4Y8J-1/2/766835287ce0a45f86c63d60ae71aad8}, doi = {10.1016/j.jenvman.2010.10.038}, abstract = {Within the field of natural-resources management, the application of adaptive management is appropriate for complex problems high in uncertainty. Adaptive management is becoming an increasingly popular management-decision tool within the scientific community and has developed into two primary schools of thought: the Resilience-Experimentalist School (with high emphasis on stakeholder involvement, resilience, and highly complex models) and the Decision-Theoretic School (which results in relatively simple models through emphasizing stakeholder involvement for identifying management objectives). Because of these differences, adaptive management plans implemented under each of these schools may yield varying levels of success. We evaluated peer-reviewed literature focused on incorporation of adaptive management to identify components of successful adaptive management plans. Our evaluation included adaptive management elements such as stakeholder involvement, definitions of management objectives and actions, use and complexity of predictive models, and the sequence in which these elements were applied. We also defined a scale of degrees of success to make comparisons between the two adaptive management schools of thought. Our results include the relationship between the adaptive management process documented in the reviewed literature and our defined continuum of successful outcomes. Our data suggest an increase in the number of published articles with substantive discussion of adaptive management from 2000 to 2009 at a mean rate of annual change of 0.92 (r2 = 0.56). Additionally, our examination of data for temporal patterns related to each school resulted in an increase in acknowledgement of the Decision-Theoretic School of thought at a mean annual rate of change of 0.02 (r2 = 0.6679) and a stable acknowledgement for the Resilience-Experimentalist School of thought (r2 = 0.0042; slope = 0.0013). Identifying the elements of successful adaptive management will be advantageous to natural-resources managers considering adaptive management as a decision tool.}, number = {5}, urldate = {2011-03-02}, journal = {Journal of Environmental Management}, author = {McFadden, Jamie E. and Hiller, Tim L. and Tyre, Andrew J.}, month = may, year = {2011}, keywords = {Adaptive management, Resilience, Structured decision-making, uncertainty}, pages = {1354--1359}, file = {ScienceDirect Full Text PDF:files/33026/McFadden et al. - 2011 - Evaluating the efficacy of adaptive management app.pdf:application/pdf;ScienceDirect Snapshot:files/33027/science.html:text/html} }
@article{ jaafari_relationship_2011, title = {The relationship between insight and uncertainty in obsessive-compulsive disorder}, volume = {44}, issn = {1423-033X}, doi = {10.1159/000323607}, abstract = {{BACKGROUND}: The aim of this study was to investigate the relationship between the levels of insight and checking-related uncertainty in patients with obsessive-compulsive disorder ({OCD}). {SAMPLING} {AND} {METHODS}: Twenty {OCD} patients with checking compulsions and without current comorbidity were recruited. We used an experimental paradigm that gave subjects the opportunity to check during a decision-making task, thereby allowing for the calculation of a response time index ({RTI}) as the 'uncertainty cost' during decision-making. The level of insight was assessed with the Brown Assessment of Beliefs Scale ({BABS}). {RESULTS}: Regression analyses indicated a significant positive correlation between {RTI} and {BABS} scores (r = 0.49). {CONCLUSIONS}: The level of insight is related to cognitive characteristics underlying {OCD} symptoms, in particular, checking-related uncertainty in checking {OCD} patients. {STUDY} {LIMITATIONS}: The absence of a comparison group and the low number of included patients are the main limitations of the present study.}, language = {eng}, number = {4}, journal = {Psychopathology}, author = {Jaafari, Nematollah and Aouizerate, Bruno and Tignol, Jean and El-Hage, Wissam and Wassouf, Issa and Guehl, Dominique and Bioulac, Bernard and Daniel, Marie-Laure and Lacoste, Jerome and Gil, Roger and Burbaud, Pierre and Rotge, Jean-Yves and {Insight Study Group}}, year = {2011}, pmid = {21546788}, keywords = {Adult, Compulsive Behavior, Decision Making, Female, Humans, Male, Middle Aged, Obsessive-Compulsive Disorder, Psychiatric Status Rating Scales, uncertainty}, pages = {272--276} }
@article{krysiak_optimal_2010, title = {The optimal size of a permit market}, volume = {60}, issn = {0095-0696}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0095069610000562}, doi = {10.1016/j.jeem.2010.05.001}, number = {2}, journal = {Journal of Environmental Economics and Management}, author = {Krysiak, Frank C. and Schweitzer, Patrick}, month = sep, year = {2010}, keywords = {Asymmetric information, Hot spot, Non-uniformly mixed pollutant, Regulation, Spatial model, Tradable permits, uncertainty}, pages = {133--143}, file = {ScienceDirect Full Text PDF:files/31871/Krysiak and Schweitzer - 2010 - The optimal size of a permit market.pdf:application/pdf;ScienceDirect Snapshot:files/31872/science.html:text/html} }
@article{cornfordUncertWebChainingWeb2010, title = {{{UncertWeb}}: Chaining Web Services Accounting for Uncertainty}, author = {Cornford, D. and Jones, R. and Bastin, L. and Williams, M. and Pebesma, E. and Nativi, S.}, year = {2010}, month = may, volume = {12}, pages = {9052+}, issn = {1607-7962}, abstract = {The development of interoperable services that permit access to data and processes, typically using web service based standards, opens up the possibility for increasingly complex chains of data and processes, which might be discovered and composed in increasingly automatic ways. This concept, sometimes referred to as the "Model Web", offers the promise of integrated (Earth) system models, with pluggable web service based components which can be discovered, composed and evaluated dynamically. A significant issue with such service chains, indeed in any composite model composed of coupled components, is that in all interesting (non-linear) cases the effect of uncertainties on inputs, or components within the chain will have complex, potentially unexpected effects on the outputs. Within the FP7 UncertWeb project we will be developing a mechanism and an accompanying set of tools to enable rigorous uncertainty management in web based service chains involving both data and processes. The project will exploit and extend the UncertML candidate standard to flexibly propagate uncertainty through service chains, including looking at mechanisms to develop uncertainty enabled profiles of existing Open Geospatial Consortium services. To facilitate the use of such services we will develop tools to address the definition of the input uncertainties (elicitation), manage the uncertainty propagation (emulation), undertake uncertainty and sensitivity analysis and visualise the output uncertainty. In this talk we will outline the challenges of the UncertWeb project, illustrating this with a prototype service chain we have created for correcting station level pressure to sea-level pressure, which accounts for the various uncertainties involved. In particular we will discuss some of the challenges of chaining Open Geospatial Consortium services using the Business Process Execution Language. We will also address the issue of computational cost and communication bandwidth requirements for such systems. While the cost of obtaining a rigorous uncertainty analysis can be high, we would argue that without such quantified uncertainty estimates the output of a chain is almost useless, particularly in the case that the chain has been discovered and composed (semi) automatically.}, journal = {Geophysical Research Abstracts}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13321751,ogc,uncertainty,web-services}, lccn = {INRMM-MiD:c-13321751}, series = {{{EGU General Assembly Conference Abstracts}}} }
@article{goberVulnerabilityAssessmentClimateinduced2010, title = {Vulnerability Assessment of Climate-Induced Water Shortage in {{Phoenix}}}, author = {Gober, Patricia and Kirkwood, Craig W.}, year = {2010}, month = dec, volume = {107}, pages = {21295--21299}, issn = {1091-6490}, doi = {10.1073/pnas.0911113107}, abstract = {Global warming has profound consequences for the climate of the American Southwest and its overallocated water supplies. This paper uses simulation modeling and the principles of decision making under uncertainty to translate climate information into tools for vulnerability assessment and urban climate adaptation. A dynamic simulation model, WaterSim, is used to explore future water-shortage conditions in Phoenix. Results indicate that policy action will be needed to attain water sustainability in 2030, even without reductions in river flows caused by climate change. Challenging but feasible changes in lifestyle and slower rates of population growth would allow the region to avoid shortage conditions and achieve groundwater sustainability under all but the most dire climate scenarios. Changes in lifestyle involve more native desert landscaping and fewer pools in addition to slower growth and higher urban densities. There is not a single most likely or optimal future for Phoenix. Urban climate adaptation involves using science-based models to anticipate water shortage and manage climate risk.}, journal = {Proceedings of the National Academy of Sciences}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-8424286,assessment,climate-change,communicating-uncertainty,deep-uncertainty,global-warming,uncertainty,united-states,water-scarcity}, lccn = {INRMM-MiD:c-8424286}, number = {50} }
@incollection{portet2010towards, address = {Berlin and Heidelberg}, author = {Portet, F and Gatt, A}, booktitle = {Knowledge Representation for Health-Care: Data, Processes and Guidelines}, editor = {D. Ria{\~n}o and A. ten Teije and S. Miksch and M. Peleg}, pages = {155--168}, publisher = {Springer}, title = {Towards a possibility-theoretic approach to uncertainty in medical data interpretation for text generation}, url = {http://staff.um.edu.mt/albert.gatt/pubs/kr4hc-book-final.pdf}, year = {2010}, keywords = {natural language generation, data-to-text, fuzzy sets, uncertainty, modals}}
@article{larssonDecisionEvaluationResponse2010, title = {Decision {{Evaluation}} of {{Response Strategies}} in {{Emergency Management Using Imprecise Assessments}}}, author = {Larsson, Aron and Ekenberg, Love and Danielson, Mats}, year = {2010}, month = jan, volume = {7}, issn = {1547-7355}, doi = {10.2202/1547-7355.1646}, abstract = {This paper focuses on the decision evaluation of different response strategies in emergency management utilizing decision analysis with imprecise information. A method for the selection of response strategies in emergency management, as well as a model for the representation of catastrophic consequences, are proposed. In emergency management decision problems, the available estimates of probabilities, utilities, costs, and priority weights are often subject to large degrees of uncertainty and imprecision. When uncertainty prevails in the input data and large societal values are at stake, coping with this lack of precision becomes very important in decision making processes. The method employs representation of imprecision in probabilities, utilities, and weights on attributes in the form of interval statements and comparisons together with a formal, comprehensive, and comprehensible description of a catastrophic consequence facilitating the use of preferential statements between catastrophic consequences. The method proposed can be viewed as a more frugal decision analysis method, decreasing the efforts needed in elicitation of input statements which often is a cumbersome threshold for the use of decision analysis techniques. It is suggested as a complement to cost/benefit approaches and other approaches relying on inaccessible probabilistic data either when probability assessments regarding catastrophic events are too uncertain or when pure monetary scales are deemed inadequate.}, journal = {Journal of Homeland Security and Emergency Management}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13295874,decision-making,deep-uncertainty,disasters,emergency-events,fuzzy,risk-assessment,uncertainty}, lccn = {INRMM-MiD:c-13295874}, number = {1} }
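The interval-based evaluation described here can be made concrete. The sketch below (invented utilities and probability intervals, not the authors' data) bounds the expected utility of each response strategy by enumerating the extreme points of the probability intervals and renormalizing, which suffices for a toy two-outcome case:

import itertools

# Each strategy: list of (probability interval, utility) for its outcomes.
strategies = {
    "evacuate": [((0.10, 0.30), -20.0), ((0.70, 0.90), -5.0)],
    "shelter":  [((0.05, 0.40), -60.0), ((0.60, 0.95), -1.0)],
}

for name, outcomes in strategies.items():
    values = []
    for corner in itertools.product(*[interval for interval, _ in outcomes]):
        total = sum(corner)                      # renormalize the corner point
        values.append(sum((p / total) * u for p, (_, u) in zip(corner, outcomes)))
    print(f"{name:9s} expected utility in [{min(values):7.2f}, {max(values):7.2f}]")

For a linear-fractional objective over a box the extrema sit at the corners, so this brute-force enumeration is exact here; larger problems need the constraint-handling machinery the paper relies on.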
@article{foodyAssessingAccuracyLand2010, title = {Assessing the Accuracy of Land Cover Change with Imperfect Ground Reference Data}, author = {Foody, Giles M.}, year = {2010}, month = oct, volume = {114}, pages = {2271--2285}, issn = {0034-4257}, doi = {10.1016/j.rse.2010.05.003}, abstract = {The ground data used as a reference in the validation of land cover change products are often not an ideal gold standard but degraded by error. The effects of ground reference data error on the accuracy of land cover change detection and the accuracy of estimates of the extent of change were evaluated. Twelve data sets were simulated to allow the exploration of the impacts of a spectrum of ground data imperfections on the estimation of the producer's and user's accuracy of change as well as of change extent. Simulated data were used since this ensured that the actual properties of the data were known and to exclude effects due to other sources of ground reference data error; although the impacts of simulated reference data error on two real confusion matrices are also illustrated. The imperfections evaluated ranged from the inclusion of small amounts of known error into the ground reference data through to the extreme situation in which ground data were absent. The results show that even small amounts of error in the ground reference data can introduce large error into studies of land cover change by remote sensing and reinforce the desire to avoid the expression ground truth as this might imply that the data are a gold standard reference. The effect of reference data imperfections was dependent on the degree of association between the errors in the cross-tabulated data sets. For example, in the scenarios investigated, a 10\,\% error in the reference data set introduced an underestimation of the producer's accuracy of 18.5\,\% if the errors were independent but an over-estimation of the producer's accuracy of 12.3\,\% if the errors were correlated. The magnitude of the mis-estimation of the producer's accuracy was also a function of the amount of change and greatest at low levels of change. The amount of land cover change estimated also varied greatly as a function of ground reference data error. Some possible methods to reduce or even remove the impacts of ground reference data error were illustrated. These ranged from simple algebraic means to estimate the actual values of accuracy and change extent if the imperfections were known through to a latent class analysis that allowed the assessment of classification accuracy and estimation of change extent without the use of ground reference data if the underlying model is defined appropriately.}, journal = {Remote Sensing of Environment}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-7393542,accuracy,cognitive-biases,data-errors,extrapolation-error,field-measurements,land-cover,modelling-uncertainty,non-linearity,statistics,uncertainty,uncertainty-propagation,validation}, lccn = {INRMM-MiD:c-7393542}, number = {10} }
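Foody's central point, that error in the reference data distorts accuracy estimates, can be checked in a few lines. The sketch below (hypothetical rates with independent reference error, not the paper's simulated data sets) computes the producer's accuracy of a change map against a perfect and a degraded reference:

import numpy as np

rng = np.random.default_rng(1)
n = 100_000
true_change = rng.random(n) < 0.10                 # 10% of sites actually changed
# Map with 90% detection of change, 5% false detection at no-change sites:
mapped = np.where(true_change, rng.random(n) < 0.90, rng.random(n) < 0.05)

def producers_accuracy(reference, mapped):
    # Share of reference "change" sites that the map also labels as change.
    return (mapped & reference).sum() / reference.sum()

print(f"vs perfect reference: {producers_accuracy(true_change, mapped):.3f}")
flip = rng.random(n) < 0.10                        # 10% independent label error
noisy_reference = true_change ^ flip
print(f"vs noisy reference:   {producers_accuracy(noisy_reference, mapped):.3f}")

With independent errors the apparent producer's accuracy drops sharply, matching the direction of bias reported in the abstract.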
@article{cheah_adaptive_2010, title = {Adaptive {Vision} and {Force} {Tracking} {Control} for {Robots} {With} {Constraint} {Uncertainty}}, volume = {15}, issn = {1083-4435}, doi = {10.1109/TMECH.2009.2027115}, abstract = {In force control applications of robots, it is difficult to obtain an exact model of a constraint surface. In the presence of constraint uncertainty, the robot needs to adapt to the uncertainty in external parameters due to the environment, in addition to the uncertainties in internal parameters of the robot kinematics and dynamics. In this paper, a visually servoed adaptive controller is proposed for motion and force tracking with uncertainties in the constraint surface, kinematics, dynamics, and camera model. We shall show that the robot can track the desired trajectories with the uncertain internal and external parameters updated online. This gives the robot a high degree of flexibility in dealing with changes and uncertainties in its model and the environment.}, number = {3}, journal = {IEEE/ASME Transactions on Mechatronics}, author = {Cheah, C. C. and Hou, S. P. and Zhao, Y. and Slotine, J. J. E.}, month = jun, year = {2010}, keywords = {Adaptive control, Cameras, Force control, Motion control, Programmable control, Robot control, Robot kinematics, Robot vision systems, Tracking, Uncertainty, adaptive control, adaptive controller, adaptive vision, constraint uncertainty, force control, force tracking control, position control, robot control, trajectory tracking, uncertain kinematics and dynamics, visual servoing}, pages = {389--399} }
@article{grune-yanoffPhilosophyEpistemologySimulation2010, title = {The Philosophy and Epistemology of Simulation: A Review}, author = {{Gr{\"u}ne-Yanoff}, Till and Weirich, Paul}, year = {2010}, month = feb, volume = {41}, pages = {20--50}, issn = {1552-826X}, doi = {10.1177/1046878109353470}, abstract = {The philosophical literature on simulations has increased dramatically during the past 40 years. Many of its main topics are epistemological. For example, philosophers consider how the results of simulations help explain natural phenomena. This essay's review treats mainly simulations in the social sciences. It considers the nature of simulations, the varieties of simulation, and uses of simulations for representation, prediction, explanation, and policy decisions. Being oriented toward philosophy of science, it compares simulations to models and experiments and considers whether simulations raise new methodological issues. The essay concludes that several features of simulations set them apart from models and experiments and make them novel scientific tools, whose powers and limits are not yet well understood.}, journal = {Simulation \& Gaming}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-7109018,automation,computational-science,epistemology,science-ethics,science-policy-interface,scientific-communication,uncertainty}, lccn = {INRMM-MiD:c-7109018}, number = {1} }
@article{mearnsDramaUncertainty2010, title = {The Drama of Uncertainty}, author = {Mearns, Linda O.}, year = {2010}, month = may, volume = {100}, pages = {77--85}, issn = {0165-0009}, doi = {10.1007/s10584-010-9841-6}, abstract = {[Excerpt] Concluding remarks. We do not have consensus among stakeholders/decision makers, climate scientists, and social scientists on the relative importance of reducing uncertainty about future regional climate change for decision making. And we may never have such consensus. In this regard we are faced with an instance of meta-deep uncertainty. While it makes sense that there may be different perspectives on this issue based on what aspect of regional adaptation to climate change one is considering, it is also clear that perspectives are still too far apart. Looking at the problem of planning for adaptation as a whole, the failure to arrive at a more unified perspective on the issue of reducing climate change uncertainty indicates a failure to advance seamless interdisciplinarity. The journal Climatic Change, perhaps more than any other, has promoted the broad-based interdisciplinarity that is needed for us to move ahead on this problem. It will continue to be instrumental as we move forward to create the seamless interdisciplinarity that is needed. Large amounts of computing power and funding likely will continue to be dedicated to modeling future climate. The large number of climate simulations at higher spatial resolutions planned for the IPCC Fifth Assessment Report attests to this, and we will certainly learn from these results. But will we learn as much as we might if we had a truly balanced research program that also provided sufficient funding for in-depth vulnerability assessments and investment in improving or expanding decision making protocols under deep uncertainty? Let us hope that such a balanced program is still possible.}, journal = {Climatic Change}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-7298602,accuracy,adaptation,climate-change,deep-uncertainty,science-based-decision-making,science-policy-interface,uncertainty}, lccn = {INRMM-MiD:c-7298602}, number = {1} }
@article{stankiewicz_role_2009, title = {The role of risks and uncertainties in technological conflicts: {Three} strategies of constructing ignorance}, volume = {22}, shorttitle = {The role of risks and uncertainties in technological conflicts}, doi = {10.1080/13511610902770636}, abstract = {How are the conflicts over the use of certain technologies - such as biotechnology, nuclear energy or nanotechnologies - being solved? What are the methods used by conflicting parties to assert their definitions of reality? What role do uncertainties and risks play in these conflicts? How are they treated? What strategies are used by proponents and opponents of a controversial technology to persuade the public and decision-makers? This article aims at finding answers to these questions by looking at technological conflicts from the perspective of the reduction of risks and uncertainty. The lesson drawn from the study of ongoing and past conflicts over controversies in technological development should help to better understand the dynamics of conflicts focused on converging technologies. The reduction of uncertainty is analyzed from the perspective of the sociology of non-knowledge and ignorance. It is argued that new areas of non-knowledge are being created by reducing uncertainty and risks in technological conflicts.}, number = {1}, journal = {Innovation}, author = {Stankiewicz, P.}, year = {2009}, note = {1}, keywords = {10 Ignorance, uncertainty and risk, Controversies, Ignorance, incertitude et risque, Non-knowledge, PRINTED (Fonds papier), Risk, Technological conflicts, ignorance, uncertainty}, pages = {105--124}, }
@article{ title = {An evolutionary Bayesian belief network methodology for optimum management of groundwater contamination}, type = {article}, year = {2009}, keywords = {Bayesian belief network,Evolutionary optimization,Multi-objective,Uncertainty,Water resources management}, pages = {303-310}, volume = {24}, websites = {http://www.sciencedirect.com/science/article/pii/S1364815208001527}, month = {3}, id = {a9ab7039-fc72-3b3b-a9fb-d02b3c6fb47d}, created = {2015-04-11T18:33:35.000Z}, accessed = {2015-02-19}, file_attached = {false}, profile_id = {95e10851-cdf3-31de-9f82-1ab629e601b0}, group_id = {71a29c65-85d2-3809-a3a1-fe4a94dc78d2}, last_modified = {2017-03-14T14:27:45.955Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, private_publication = {false}, abstract = {An integrated methodology, based on Bayesian belief network (BBN) and evolutionary multi-objective optimization (EMO), is proposed for combining available evidence to help water managers evaluate implications, including costs and benefits of alternative actions, and suggest best decision pathways under uncertainty. A Bayesian belief network is a probabilistic graphical model that represents a set of variables and their probabilistic relationships, which also captures historical information about these dependencies. In complex applications where the task of defining the network could be difficult, the proposed methodology can be used in validation of the network structure and the parameters of the probabilistic relationship. Furthermore, in decision problems where it is difficult to choose appropriate combinations of interventions, the states of key variables under the full range of management options cannot be analyzed using a Bayesian belief network alone as a decision support tool. The proposed optimization method is used to deal with complexity in learning about actions and probabilities and also to perform inference. The optimization algorithm generates the state variable values which are fed into the Bayesian belief network. It is possible then to calculate the probabilities for all nodes in the network (belief propagation). Once the probabilities of all the linked nodes have been updated, the objective function values are returned to the optimization tool and the process is repeated. The proposed integrated methodology can help in dealing with uncertainties in decision making pertaining to human behavior. It also eliminates the shortcoming of Bayesian belief networks in introducing boundary constraints on probability of state values of the variables. The effectiveness of the proposed methodology is examined in optimum management of groundwater contamination risks for a well field capture zone outside Copenhagen city.}, bibtype = {article}, author = {Farmani, Raziyeh and Henriksen, Hans Jørgen and Savic, Dragan}, doi = {10.1016/j.envsoft.2008.08.005}, journal = {Environmental Modelling \& Software}, number = {3} }
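The coupling proposed here (the optimizer proposes decision states, the belief network propagates probabilities, and objective values flow back) can be sketched compactly. In the toy below the conditional probability table, costs, and damage scale are invented, the "network" is a single contamination node, and plain enumeration stands in for the evolutionary multi-objective search:

import itertools

# P(contamination | pumping, treatment): an invented stand-in CPT.
P_CONTAM = {(0, 0): 0.50, (0, 1): 0.25, (1, 0): 0.20, (1, 1): 0.05}
COST = {"pumping": 1.0, "treatment": 1.8}

def objectives(pumping, treatment):
    """Return (expected damage, intervention cost) for one decision vector."""
    p_contaminated = P_CONTAM[(pumping, treatment)]   # trivial belief propagation
    cost = pumping * COST["pumping"] + treatment * COST["treatment"]
    return 10.0 * p_contaminated, cost

points = [(objectives(p, t), (p, t)) for p, t in itertools.product([0, 1], repeat=2)]

def dominated(candidate, others):
    d, c = candidate
    return any(d2 <= d and c2 <= c and (d2, c2) != (d, c) for (d2, c2), _ in others)

for (d, c), decision in sorted(points):
    if not dominated((d, c), points):
        print(f"non-dominated decision {decision}: expected damage {d:.2f}, cost {c:.2f}")

A real application would replace the dictionary with inference over the learned network and the enumeration with NSGA-II or a similar evolutionary algorithm, as the paper does.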
@inproceedings{muller_effects_2009, title = {Effects of geometric imperfections to the control of redundantly actuated parallel manipulators}, doi = {10.1109/ROBOT.2009.5152183}, abstract = {The model-based control of robotic manipulators relies on an exact model of the manipulator. Redundantly actuated PKMs possess the ability to exhibit internal prestress that does not affect their environment. This allows for a purposeful distribution of control forces, taking into account secondary tasks, such as optimal force distribution, active stiffness, and backlash avoiding control. In the presence of kinematic uncertainties this feature can become a serious problem since then the control forces may be annihilated or even some of the intentional prestress components may interfere with the environment. The effect of such kinematic uncertainties and the application of standard model-based control schemes is analyzed in this paper. It is shown that, in the presence of model uncertainties, it leads to parasitic perturbation forces that cannot be compensated by the controls. An amended version of the augmented PD and computed torque control scheme is proposed that removes the parasitic feedback forces.}, booktitle = {2009 {IEEE} {International} {Conference} on {Robotics} and {Automation}}, author = {Muller, A.}, month = may, year = {2009}, keywords = {Feedback, PD control, PD control scheme, Parallel robots, Robotics and automation, Solid modeling, Torque control, Uncertainty, active stiffness, backlash avoiding control, compensation, force control, geometric imperfection, geometric uncertainties, geometry, internal prestress, inverse dynamics, inverse problems, kinematic uncertainty, kinematics, manipulators, model-based control, model-based control scheme, optimal force distribution, parallel manipulator, parasitic feedback force control, parasitic perturbation force compensation, prestress, redundancy, redundant actuation, redundant manipulators, redundantly-actuated parallel robotic manipulator control, torque control scheme, uncertain systems}, pages = {1782--1787} }
@article{cataldoSoftwareDependenciesWork2009, title = {Software Dependencies, Work Dependencies, and Their Impact on Failures}, author = {Cataldo, Marcelo and Mockus, Audris and Roberts, Jeffrey A. and Herbsleb, James D.}, year = {2009}, month = nov, volume = {35}, pages = {864--878}, issn = {0098-5589}, doi = {10.1109/tse.2009.42}, abstract = {Prior research has shown that customer-reported software faults are often the result of violated dependencies that are not recognized by developers implementing software. Many types of dependencies and corresponding measures have been proposed to help address this problem. The objective of this research is to compare the relative performance of several of these dependency measures as they relate to customer-reported defects. Our analysis is based on data collected from two projects from two independent companies. Combined, our data set encompasses eight years of development activity involving 154 developers. The principal contribution of this study is the examination of the relative impact that syntactic, logical, and work dependencies have on the failure proneness of a software system. While all dependencies increase the fault proneness, the logical dependencies explained most of the variance in fault proneness, while workflow dependencies had more impact than syntactic dependencies. These results suggest that practices such as rearchitecting, guided by the network structure of logical dependencies, hold promise for reducing defects.}, journal = {IEEE Transactions on Software Engineering}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-5431315,~to-add-doi-URL,complexity,computational-science,integrated-modelling,integration-techniques,modelling,software-errors,software-uncertainty,uncertainty,workflow,workflow-dependencies}, lccn = {INRMM-MiD:c-5431315}, number = {6} }
@article{phillips_sample_2009, title = {Sample selection bias and presence-only distribution models: implications for background and pseudo-absence data}, volume = {19}, issn = {1051-0761}, shorttitle = {Sample selection bias and presence-only distribution models}, url = {http://www.esajournals.org/doi/full/10.1890/07-2153.1}, doi = {10.1890/07-2153.1}, number = {1}, urldate = {2010-05-05}, journal = {Ecological Applications}, author = {Phillips, Steven J. and Dudík, Miroslav and Elith, Jane and Graham, Catherine H. and Lehmann, Anthony and Leathwick, John and Ferrier, Simon}, month = jan, year = {2009}, pages = {181--197} }
@article{terranovaSoilErosionRisk2009a, title = {Soil Erosion Risk Scenarios in the {{Mediterranean}} Environment Using {{RUSLE}} and {{GIS}}: {{An}} Application Model for {{Calabria}} (Southern {{Italy}})}, shorttitle = {Soil Erosion Risk Scenarios in the {{Mediterranean}} Environment Using {{RUSLE}} and {{GIS}}}, author = {Terranova, O. and Antronico, L. and Coscarelli, R. and Iaquinta, P.}, year = {2009}, month = nov, volume = {112}, pages = {228--245}, issn = {0169-555X}, doi = {10.1016/j.geomorph.2009.06.009}, abstract = {Soil erosion by water (WSE) has become a relevant issue at the Mediterranean level. In particular, natural conditions and human impact have made the Calabria (southern Italy) particularly prone to intense WSE. The purpose of this investigation is to identify areas highly affected by WSE in Calabria by comparing the scenarios obtained by assuming control and preventive measures and actions, as well as actual conditions generated by forest fires, also in the presence of conditions of maximum rainfall erosion. Geographic Information System techniques have been adopted to treat data of reasonable spatial resolution obtained at a regional scale for application to the RUSLE model. This work is based on the comparison of such data with a basic scenario that has been defined by the present situation (present scenario). In this scenario: (i) R has been assessed by means of an experimental relation adjusted to Calabria on the basis of 5-min observations; (ii) K has been drawn from the soil map of Calabria including 160 soilscapes; (iii) LS has been estimated according to the RUSLE2 model by using (among other subfactors) a 40-m square cell DTM; (iv) C has been derived by processing the data inferred from the project Corine Land Cover, whose legend includes 35 different land uses on three levels; and (v) P has been hypothesized as equal to 1. For the remaining three hypothesized scenarios, the RUSLE factors have been adjusted according to experimental data and to data in the literature. In particular, forest areas subject to fire have been randomly generated as far as fire location, extension, structure, and intensity are concerned. The values obtained by the application of the RUSLE model have emphasized that land management by means of measures and actions for reducing WSE causes a notable reduction of the erosive rate decreasing from \textasciitilde{}30 to 12.3 Mg ha$^{-1}$ y$^{-1}$. On the other hand, variations induced by hypothetical wildfires in forests on 10\% of the regional territory bring WSE over the whole region to values varying from 30 to 116 Mg ha$^{-1}$ y$^{-1}$. This study can be offered to territorial planning authorities as an evaluation instrument as it highlights the merits and limitations of some territorial management actions. In fact, in Calabria no observations exist concerning the implications of these actions.}, journal = {Geomorphology}, keywords = {~INRMM-MiD:z-IBF7S4QQ,c-factor,data-uncertainty,erodibility,fire-severity,italy,modelling-uncertainty,post-fire-impacts,soil-erosion,soil-resources,uncertainty,wildfires}, language = {en}, lccn = {INRMM-MiD:z-IBF7S4QQ}, number = {3} }
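The scenario comparisons in this study rest on the multiplicative RUSLE estimate, A = R x K x LS x C x P. A minimal sketch (all factor values below are invented placeholders, not the Calabrian data) shows that a post-fire scenario is just a re-evaluation of the product with a modified cover-management factor:

import numpy as np

R  = np.array([[1800., 2100.], [1500., 1900.]])      # rainfall erosivity
K  = np.array([[0.030, 0.025], [0.040, 0.020]])      # soil erodibility
LS = np.array([[1.2, 2.5], [0.8, 3.1]])              # slope length-steepness
C_present  = np.array([[0.05, 0.10], [0.02, 0.20]])  # cover-management factor
C_postfire = C_present * 4.0                         # crude stand-in for burnt cover
P = 1.0                                              # support practices, as in the paper

A_present  = R * K * LS * C_present * P              # soil loss per cell
A_postfire = R * K * LS * C_postfire * P
print(f"mean soil loss, present scenario:   {A_present.mean():.2f}")
print(f"mean soil loss, post-fire scenario: {A_postfire.mean():.2f}")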
@article{araujoEnsembleForecastingSpecies2007, title = {Ensemble Forecasting of Species Distributions}, author = {Araujo, M. and New, M.}, year = {2007}, month = jan, volume = {22}, pages = {42--47}, issn = {0169-5347}, doi = {10.1016/j.tree.2006.09.010}, abstract = {Concern over implications of climate change for biodiversity has led to the use of bioclimatic models to forecast the range shifts of species under future climate-change scenarios. Recent studies have demonstrated that projections by alternative models can be so variable as to compromise their usefulness for guiding policy decisions. Here, we advocate the use of multiple models within an ensemble forecasting framework and describe alternative approaches to the analysis of bioclimatic ensembles, including bounding box, consensus and probabilistic techniques. We argue that, although improved accuracy can be delivered through the traditional tasks of trying to build better models with improved data, more robust forecasts can also be achieved if ensemble forecasts are produced and analysed appropriately.}, journal = {Trends in Ecology \& Evolution}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-1030028,consensus,ensemble,modelling,robust-modelling,species-distribution,uncertainty}, lccn = {INRMM-MiD:c-1030028}, number = {1} }
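The ensemble summaries advocated here reduce, in their simplest form, to elementwise operations over a stack of model outputs. A sketch with random stand-in suitability maps (three "models" on a 4x4 grid; the 0.5 presence threshold is an arbitrary choice):

import numpy as np

rng = np.random.default_rng(2)
models = rng.random((3, 4, 4))        # model x grid stack of suitabilities

consensus = models.mean(axis=0)       # consensus: committee-average suitability
presence = models > 0.5               # threshold each model separately
bounding_all = presence.all(axis=0)   # conservative: every model predicts presence
bounding_any = presence.any(axis=0)   # liberal: at least one model predicts presence

print("consensus suitability:\n", consensus.round(2))
print("cells with unanimous presence:", int(bounding_all.sum()))
print("cells with any-model presence:", int(bounding_any.sum()))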
@article{mansard_task_2007, title = {Task {Sequencing} for {High}-{Level} {Sensor}-{Based} {Control}}, volume = {23}, issn = {1552-3098}, doi = {10.1109/TRO.2006.889487}, abstract = {Classical sensor-based approaches tend to constrain all the degrees of freedom of a robot during the execution of a task. In this paper, a new solution is proposed. The key idea is to divide the global full-constraining task into several subtasks, which can be applied or inactivated to take into account potential constraints of the environment. Far from any constraint, the robot moves according to the full task. When it comes closer to a configuration to avoid, a higher level controller removes one or several subtasks, and activates them again when the constraint is avoided. The last controller ensures the convergence at the global level by introducing some look-ahead capabilities when a local minimum is reached. The robot accomplishes the global task by automatically sequencing sensor-based tasks, obstacle avoidance, and short deliberative phases. In this paper, a complete solution to implement this idea is proposed, along with several experiments that prove the validity of this approach.}, number = {1}, journal = {IEEE Transactions on Robotics}, author = {Mansard, N. and Chaumette, F.}, month = feb, year = {2007}, keywords = {Automatic control, Avoidance, Motion control, Motion planning, Robot control, Robot sensing systems, Robotics and automation, Uncertainty, collision avoidance, high-level sensor-based control, mobile robots, obstacle avoidance, path planning, planning, redundancy, robot motion control, sensor-based control, sequences, task sequencing, tasks sequencing, visual servoing}, pages = {60--72} }
@article{magginiImprovingGeneralizedRegression2006, title = {Improving Generalized Regression Analysis for the Spatial Prediction of Forest Communities}, author = {Maggini, Ramona and Lehmann, Anthony and Zimmermann, Niklaus E. and Guisan, Antoine}, year = {2006}, month = oct, volume = {33}, pages = {1729--1749}, issn = {0305-0270}, doi = {10.1111/j.1365-2699.2006.01465.x}, abstract = {Aim: This study used data from temperate forest communities to assess: (1) five different stepwise selection methods with generalized additive models, (2) the effect of weighting absences to ensure a prevalence of 0.5, (3) the effect of limiting absences beyond the environmental envelope defined by presences, (4) four different methods for incorporating spatial autocorrelation, and (5) the effect of integrating an interaction factor defined by a regression tree on the residuals of an initial environmental model. Location: State of Vaud, western Switzerland. Methods: Generalized additive models (GAMs) were fitted using the grasp package (generalized regression analysis and spatial predictions, http://www.cscf.ch/grasp). Results: Model selection based on cross-validation appeared to be the best compromise between model stability and performance (parsimony) among the five methods tested. Weighting absences returned models that perform better than models fitted with the original sample prevalence. This appeared to be mainly due to the impact of very low prevalence values on evaluation statistics. Removing zeroes beyond the range of presences on main environmental gradients changed the set of selected predictors, and potentially their response curve shape. Moreover, removing zeroes slightly improved model performance and stability when compared with the baseline model on the same data set. Incorporating a spatial trend predictor improved model performance and stability significantly. Even better models were obtained when including local spatial autocorrelation. A novel approach to include interactions proved to be an efficient way to account for interactions between all predictors at once. Main conclusions: Models and spatial predictions of 18 forest communities were significantly improved by using either: (1) cross-validation as a model selection method, (2) weighted absences, (3) limited absences, (4) predictors accounting for spatial autocorrelation, or (5) a factor variable accounting for interactions between all predictors. The final choice of model strategy should depend on the nature of the available data and the specific study aims. Statistical evaluation is useful in searching for the best modelling practice. However, one should not neglect to consider the shapes and interpretability of response curves, as well as the resulting spatial predictions in the final assessment.}, journal = {Journal of Biogeography}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-835166,~to-add-doi-URL,bias-correction,bioclimatic-predictors,correlation-analysis,forest-resources,habitat-suitability,statistics,uncertainty,weighting}, lccn = {INRMM-MiD:c-835166}, number = {10} }
@article{tornMissingFeedbacksAsymmetric2006, title = {Missing Feedbacks, Asymmetric Uncertainties, and the Underestimation of Future Warming}, author = {Torn, Margaret S. and Harte, John}, year = {2006}, month = may, volume = {33}, pages = {n/a}, issn = {0094-8276}, doi = {10.1029/2005gl025540}, abstract = {Historical evidence shows that atmospheric greenhouse gas (GhG) concentrations increase during periods of warming, implying a positive feedback to future climate change. We quantified this feedback for CO2 and CH4 by combining the mathematics of feedback with empirical ice-core information and general circulation model (GCM) climate sensitivity, finding that the warming of 1.5-4.5\textdegree C associated with anthropogenic doubling of CO2 is amplified to 1.6-6.0\textdegree C warming, with the uncertainty range deriving from GCM simulations and paleo temperature records. Thus, anthropogenic emissions result in higher final GhG concentrations, and therefore more warming, than would be predicted in the absence of this feedback. Moreover, a symmetrical uncertainty in any component of feedback, whether positive or negative, produces an asymmetrical distribution of expected temperatures skewed toward higher temperature. For both reasons, namely the omission of key positive feedbacks and the asymmetrical uncertainty arising from feedbacks, it is likely that the future will be hotter than we think.}, journal = {Geophysical Research Letters}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-3308907,climate-change,climate-projections,feedback,ghg,global-warming,modelling,non-linearity,prediction-bias,temperature,uncertainty}, lccn = {INRMM-MiD:c-3308907}, number = {10} }
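The asymmetry argument follows from the standard linear-feedback idealization (stated here in textbook form, not necessarily the authors' exact formulation). With no-feedback warming $\Delta T_0$ and total feedback gain $g < 1$,

\[ \Delta T = \frac{\Delta T_0}{1 - g}, \qquad \frac{\partial\,\Delta T}{\partial g} = \frac{\Delta T_0}{(1 - g)^2}, \]

so a symmetric uncertainty $g \pm \delta$ maps to the asymmetric interval

\[ \left[ \frac{\Delta T_0}{1 - g + \delta},\ \frac{\Delta T_0}{1 - g - \delta} \right], \]

which is skewed toward higher temperatures, exactly the distributional asymmetry the abstract describes.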
@article{tedeschiAssessmentAdequacyMathematical2006, title = {Assessment of the Adequacy of Mathematical Models}, author = {Tedeschi, Luis O.}, year = {2006}, month = sep, volume = {89}, pages = {225--247}, issn = {0308-521X}, doi = {10.1016/j.agsy.2005.11.004}, abstract = {Models are mathematical representations of mechanisms that govern natural phenomena that are not fully recognized, controlled, or understood. They have become indispensable tools via decision support systems for policy makers and researchers to provide ways to express the scientific knowledge. Model usefulness has to be assessed through its suitability for a particular purpose. Adequate statistical analysis is an indispensable step during development, evaluation, and revision phases of a model. Therefore, in this paper we discussed and compared several techniques to evaluate mathematical models designed for predictive purposes. The identification and acceptance of wrongness of a model is an important step towards the development of more reliable and accurate models. The assessment of the adequacy of models is only possible through the combination of several statistical analyses and proper investigation regarding the purposes for which the mathematical model was initially conceptualized and developed for. The use of only a few techniques may be misleading in selecting the appropriate model in a given scenario.}, journal = {Agricultural Systems}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10284648,accuracy,accuracy-vs-precision,assessment,modelling-uncertainty,precisely-wrong,uncertainty}, lccn = {INRMM-MiD:c-10284648}, number = {2-3} }
@article{pearsonModelbasedUncertaintySpecies2006, title = {Model-Based Uncertainty in Species Range Prediction}, author = {Pearson, Richard G. and Thuiller, Wilfried and Ara{\'u}jo, Miguel B. and {Martinez-Meyer}, Enrique and Brotons, Llu{\'\i}s and McClean, Colin and Miles, Lera and Segurado, Pedro and Dawson, Terence P. and Lees, David C.}, year = {2006}, month = oct, volume = {33}, pages = {1704--1711}, issn = {0305-0270}, doi = {10.1111/j.1365-2699.2006.01460.x}, abstract = {[Aim] Many attempts to predict the potential range of species rely on environmental niche (or 'bioclimate envelope') modelling, yet the effects of using different niche-based methodologies require further investigation. Here we investigate the impact that the choice of model can have on predictions, identify key reasons why model output may differ and discuss the implications that model uncertainty has for policy-guiding applications. [Location] The Western Cape of South Africa. [Methods] We applied nine of the most widely used modelling techniques to model potential distributions under current and predicted future climate for four species (including two subspecies) of Proteaceae. Each model was built using an identical set of five input variables and distribution data for 3996 sampled sites. We compare model predictions by testing agreement between observed and simulated distributions for the present day (using the area under the receiver operating characteristic curve (AUC) and kappa statistics) and by assessing consistency in predictions of range size changes under future climate (using cluster analysis). [Results] Our analyses show significant differences between predictions from different models, with predicted changes in range size by 2030 differing in both magnitude and direction (e.g. from 92\,\% loss to 322\,\% gain). We explain differences with reference to two characteristics of the modelling techniques: data input requirements (presence/absence vs. presence-only approaches) and assumptions made by each algorithm when extrapolating beyond the range of data used to build the model. The effects of these factors should be carefully considered when using this modelling approach to predict species ranges. [Main conclusions] We highlight an important source of uncertainty in assessments of the impacts of climate change on biodiversity and emphasize that model predictions should be interpreted in policy-guiding applications along with a full appreciation of uncertainty.}, journal = {Journal of Biogeography}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-591817,~to-add-doi-URL,biodiversity,climate-change,communicating-uncertainty,habitat-suitability,modelling-uncertainty,niche-modelling,science-policy-interface,scientific-communication,species-distribution,uncertainty}, lccn = {INRMM-MiD:c-591817}, number = {10} }
@article{ludwig_uncertainty_2005, title = {Uncertainty in discount models and environmental accounting}, volume = {10}, number = {2}, journal = {Ecology and Society}, author = {Ludwig, Donald and Brock, William and Carpenter, Stephen R.}, year = {2005}, keywords = {NTL, eutrophication, ecosystem service, uncertainty, ecological economics, atlantic right whale, cost-benefit analysis, discounting, renewable resource} }
@article{araujoWouldClimateChange2004, title = {Would Climate Change Drive Species out of Reserves? {{An}} Assessment of Existing Reserve-Selection Methods}, author = {Araujo, Miguel B. and Cabeza, Mar and Thuiller, Wilfried and Hannah, Lee and Williams, Paul H.}, year = {2004}, month = sep, volume = {10}, pages = {1618--1626}, issn = {1354-1013}, doi = {10.1111/j.1365-2486.2004.00828.x}, abstract = {Concern for climate change has not yet been integrated in protocols for reserve selection. However, if climate changes as projected, there is a possibility that current reserve-selection methods might provide solutions that are inadequate to ensure species' long-term persistence within reserves. We assessed, for the first time, the ability of existing reserve-selection methods to secure species in a climate-change context. Six methods using a different combination of criteria (representation, suitability and reserve clustering) are compared. The assessment is carried out using European distributions of 1200 plant species and considering two extreme scenarios of response to climate change: no dispersal and universal dispersal. With our data, 6-11\,\% of species modelled would be potentially lost from selected reserves in a 50-year period. Measured uncertainties varied within 6\,\%, with 1-3\,\% attributed to dispersal assumptions and 2-5\,\% to the choice of reserve-selection method. Suitability approaches to reserve selection performed best, while reserve clustering performed poorly. We also found that 5\,\% of species modelled would lose their entire climatic envelope in the studied area; 2\,\% of the species modelled would have nonoverlapping distributions; 93\,\% of the species modelled would maintain varying levels of overlapping distributions. We conclude there are opportunities to minimize species' extinctions within reserves but new approaches are needed to account for impacts of climate change on species; especially for those projected to have temporally nonoverlapping distributions.}, journal = {Global Change Biology}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-201812,~to-add-doi-URL,climate-change,clustering,conservation,europe,species-dispersal,species-distribution,uncertainty}, lccn = {INRMM-MiD:c-201812}, number = {9} }
@article{ title = {A methodology to assess the profitability of Bt-cotton: case study results from the state of Karnataka, India}, type = {article}, year = {2004}, keywords = {Bt cotton,Economic assessment,Profitability,Uncertainty}, pages = {1249-1257}, volume = {23}, websites = {http://www.sciencedirect.com/science/article/B6T5T-4D1YXNJ-1/2/e155087123c544361c8f90b733984b76}, id = {f6435a38-6c3b-3452-abe8-b1239c3439da}, created = {2012-01-05T13:08:30.000Z}, file_attached = {false}, profile_id = {1a467167-0a41-3583-a6a3-034c31031332}, group_id = {0e532975-1a47-38a4-ace8-4fe5968bcd72}, last_modified = {2012-01-05T13:14:50.000Z}, tags = {Bt cotton,India,developing countries,economic,environmental,pesticide use,producer income and expenses,productivity}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, source_type = {Journal Article}, abstract = {Bt-cotton varieties can control lepidopterous pests, hence offering the possibility to reduce chemical pesticide use. India, with the largest cotton-growing area globally, gave commercial approval for Bt cotton in 2002 and a rapid adoption of the technology is expected. This paper uses a stochastic partial budgeting approach that captures the key pest control properties of Bt cotton taking into account uncertainty of pest pressure, control effectiveness and prices to assess the profitability effects of Bt varieties and hence complements previous studies that generally excluded such issues. Results of the simulation model reveal that under the current price situation a prophylactic chemical control strategy dominates the use of Bt varieties in both irrigated and non-irrigated cotton. The effect of a higher cotton price is assessed in a second scenario that depicts a Bt cotton variety with improved fiber quality compared with varieties currently approved for commercial planting. Under this assumption, the Bt strategy would be slightly better than the prophylactic use of chemical pesticides. The model can be extended to include pests other than the bollworm and correlations among variables, e.g. prices and yield, provided sufficient evidence for such correlation exists. Results of this analysis show the impact of uncertainty in the main variables that influence the profitability of Bt cotton and alternative crop protection methods.}, bibtype = {article}, author = {Pemsl, D and Waibel, H and Orphal, J}, journal = {Crop Protection}, number = {12} }
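A stochastic partial budget of this kind amounts to Monte Carlo draws over the uncertain inputs and a comparison of the resulting profit distributions. A sketch with invented numbers (efficacies, costs, and price parameters are placeholders, not the study's Karnataka data):

import numpy as np

rng = np.random.default_rng(3)
n = 100_000
pest = rng.beta(2, 5, n)                       # uncertain pest pressure in [0, 1]
price = rng.normal(0.55, 0.08, n).clip(0.2)    # uncertain output price per kg
yield_potential = 2000.0                       # pest-free yield, kg/ha

def profit(control_efficacy, control_cost):
    harvested = yield_potential * (1 - pest * (1 - control_efficacy))
    return harvested * price - control_cost

bt = profit(control_efficacy=0.80, control_cost=160.0)     # Bt seed premium
spray = profit(control_efficacy=0.85, control_cost=140.0)  # prophylactic spraying

print(f"mean profit, Bt:    {bt.mean():8.1f} (sd {bt.std():.1f})")
print(f"mean profit, spray: {spray.mean():8.1f} (sd {spray.std():.1f})")
print(f"P(Bt beats spray):  {(bt > spray).mean():.2f}")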
@article{ title = {Ecological responses to recent climate change}, type = {article}, year = {2002}, pages = {389-395}, volume = {416}, websites = {http://dx.doi.org/10.1038/416389a}, id = {8df9a2db-99ef-3ba6-898e-d4e6c32695d8}, created = {2011-02-22T18:00:53.000Z}, file_attached = {false}, profile_id = {c04350e2-ca59-3023-9537-35726b8dc7ec}, group_id = {3addd0f7-d578-34d3-be80-24022cc062a1}, last_modified = {2019-06-04T14:58:47.448Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {Walther2002}, source_type = {Journal Article}, private_publication = {false}, bibtype = {article}, author = {Walther, G and Post, E and Convey, P and Menzel, A and Parmesan, C and Beebee, T J C and Fromentin, J and Hoegh-Guldberg, O and Bairlein, F}, doi = {10.1038/416389a}, journal = {Nature}, number = {6879} }
@article{lempertNewDecisionSciences2002, title = {A New Decision Sciences for Complex Systems}, author = {Lempert, Robert J.}, year = {2002}, month = may, volume = {99}, pages = {7309--7313}, issn = {1091-6490}, doi = {10.1073/pnas.082081699}, abstract = {Models of complex systems can capture much useful information but can be difficult to apply to real-world decision-making because the type of information they contain is often inconsistent with that required for traditional decision analysis. New approaches, which use inductive reasoning over large ensembles of computational experiments, now make possible systematic comparison of alternative policy options using models of complex systems. This article describes Computer-Assisted Reasoning, an approach to decision-making under conditions of deep uncertainty that is ideally suited to applying complex systems to policy analysis. The article demonstrates the approach on the policy problem of global climate change, with a particular focus on the role of technology policies in a robust, adaptive strategy for greenhouse gas abatement.}, journal = {Proceedings of the National Academy of Sciences}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-8246659,climate-change,complexity,computational-science,deep-uncertainty,modelling,robust-modelling,science-based-decision-making,uncertainty}, lccn = {INRMM-MiD:c-8246659}, number = {suppl 3} }
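Computer-Assisted Reasoning, as described in this abstract, evaluates candidate policies over large ensembles of plausible futures rather than over a single best-estimate model. A toy regret calculation in that spirit (the outcome model, parameter ranges, and abatement levels are all invented):

import numpy as np

rng = np.random.default_rng(4)
# 500 plausible futures: (climate damage rate, abatement cost growth).
futures = rng.uniform([0.0, 1.0], [0.1, 3.0], size=(500, 2))

def outcome(abatement, damage_rate, cost_growth):
    # Lower is better: residual damage plus abatement expenditure.
    return damage_rate * 100.0 * (1 - abatement) + cost_growth * 10.0 * abatement**2

policies = {"low": 0.2, "medium": 0.5, "high": 0.8}
scores = {name: np.array([outcome(a, d, c) for d, c in futures])
          for name, a in policies.items()}

best_per_future = np.min(np.column_stack(list(scores.values())), axis=1)
for name, s in scores.items():
    print(f"{name:6s} policy: max regret = {(s - best_per_future).max():7.2f}")

A robust (here: minimax-regret) policy is one that never falls too far behind whichever policy would have been best in each future.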
@article{liskiIncreasingCarbonStocks2002, title = {Increasing Carbon Stocks in the Forest Soils of Western {{Europe}}}, author = {Liski, Jari and Perruchoud, Daniel and Karjalainen, Timo}, year = {2002}, month = sep, volume = {169}, pages = {159--175}, issn = {0378-1127}, doi = {10.1016/s0378-1127(02)00306-7}, abstract = {The soils of western European forests may be accumulating carbon, because tree biomass has been expanding in these forests already for decades, and the more numerous and larger trees can produce more litter. We calculated the carbon budget of soils and trees in the forests of 14 EU countries plus Norway and Switzerland from 1950 to 2040 by integrating forest resource information (inventory data from 1950 to 1990 and a forest resource forecast from 2000 to 2040), biomass allocation and turnover information, and a dynamic soil carbon model. The carbon stock of the soils increased throughout the studied period. In 1990, the soil carbon sink was 26 Tg per year. This is 32 or 48\,\% compared with our two estimates of the tree carbon sink for that year. Until 2040, the soil carbon sink was estimated to increase to 43 Tg per year. This would already be 61 or 69\,\% compared with the tree carbon sink that year. In 1990, the soils contributed most to the total forest carbon sink in central Europe, where the soil carbon sink was almost as large as the tree carbon sink. The soils were least important in southern Europe, where the soil carbon sink was less than 25\,\% compared with the tree carbon sink. In the future, the contribution of the soils to the total forest carbon sink was estimated to increase everywhere except in southern Europe. The soil carbon stocks increased mainly because litter fall from living trees increased while the other sources of soil carbon, i.e. the residues of harvests and natural disturbances, varied less. This litter fall was also the largest source of soil carbon accounting for 70-80\,\% of the total. The soil carbon stocks in these forests could thus be most effectively controlled by forest management actions, such as the choices of harvest regimes or tree species, which especially affect the litter production of living trees. According to an uncertainty analysis, we may have overestimated the soil carbon sink by 35\,\% or underestimated it by 50\,\% throughout the studied period. The largest uncertainties were related to calculating the litter production of living trees and decomposition in soil.}, journal = {Forest Ecology and Management}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-13363486,carbon-stock,forest-resources,soil-resources,uncertainty,western-europe}, lccn = {INRMM-MiD:c-13363486}, number = {1-2} }
@article{pacalaConsistentLandAtmospherebased2001, title = {Consistent Land- and Atmosphere-Based {{U}}.{{S}}. Carbon Sink Estimates}, author = {Pacala, S. W. and Hurtt, G. C. and Baker, D. and Peylin, P. and Houghton, R. A. and Birdsey, R. A. and Heath, L. and Sundquist, E. T. and Stallard, R. F. and Ciais, P. and Moorcroft, P. and Caspersen, J. P. and Shevliakova, E. and Moore, B. and Kohlmaier, G. and Holland, E. and Gloor, M. and Harmon, M. E. and Fan, S. M. and Sarmiento, J. L. and Goodale, C. L. and Schimel, D. and Field, C. B.}, year = {2001}, volume = {292}, pages = {2316--2320}, issn = {1095-9203}, doi = {10.1126/science.1057320}, abstract = {For the period 1980-89, we estimate a carbon sink in the coterminous United States between 0.30 and 0.58 petagrams of carbon per year (petagrams of carbon = 1015 grams of carbon). The net carbon flux from the atmosphere to the land was higher, 0.37 to 0.71 petagrams of carbon per year, because a net flux of 0.07 to 0.13 petagrams of carbon per year was exported by rivers and commerce and returned to the atmosphere elsewhere. These land-based estimates are larger than those from previous studies (0.08 to 0.35 petagrams of carbon per year) because of the inclusion of additional processes and revised estimates of some component fluxes. Although component estimates are uncertain, about one-half of the total is outside the forest sector. We also estimated the sink using atmospheric models and the atmospheric concentration of carbon dioxide (the tracer-transport inversion method). The range of results from the atmosphere-based inversions contains the land-based estimates. Atmosphere- and land-based estimates are thus consistent, within the large ranges of uncertainty for both methods. Atmosphere-based results for 1980-89 are similar to those for 1985-89 and 1990-94, indicating a relatively stable U.S. sink throughout the period.}, journal = {Science}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-14007180,carbon-cycle,forest-resources,uncertainty,united-states}, lccn = {INRMM-MiD:c-14007180}, number = {5525} }
@Article{ Caflisch_1998aa, author = {Caflisch, Russel E.}, doi = {10.1017/S0962492900002804}, file = {Caflisch_1998aa.pdf}, group = {casper}, issn = {1474-0508}, journal = {Acta Numerica}, keywords = {monte-carlo,uncertainty,quadrature}, langid = {english}, month = jan, pages = {1--49}, title = {Monte {Carlo} and quasi-{Monte} {Carlo} methods}, volume = {7}, year = {1998}, shortjournal = {Acta. Num.} }
@article{ title = {Visualizing Georeferenced data: Representing reliability of health statistics}, type = {article}, year = {1998}, keywords = {Lung-cancer,display,maps,reliability,uncertainty}, volume = {30}, websites = {http://www.geovista.psu.edu/publications/MacEachren/MacEachren_Visualizing_98.pdf}, id = {977979cd-5620-3f07-9c2d-215e9b1a1ba8}, created = {2018-05-29T14:06:29.596Z}, file_attached = {false}, profile_id = {6d8d7993-9618-3f6c-983a-9f6761313797}, group_id = {4f1d95d1-59ee-3ce8-85ce-055cfae2da74}, last_modified = {2018-05-29T14:06:29.596Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {13130}, source_type = {article}, notes = {NCHS}, private_publication = {false}, abstract = {The power of human vision to synthesize information and recognize pattern is fundamental to the success of visualization as a scientific method. This same power can mislead investigators who use visualization to explore georeferenced data if data reliability is not addressed directly in the visualization process. Here, we apply an integrated cognitive-semiotic approach to devise and test three methods for depicting reliability of georeferenced health data. The first method makes use of adjacent maps, one for data and one for reliability. This form of paired representation is compared to two methods in which data and reliability are spatially coincident (on a single map). A novel method for coincident visually separable depiction of data and data reliability on mortality maps (using a color fill to represent data and a texture overlay to represent reliability) is found to be effective in allowing map users to recognize unreliable data without interfering with their ability to notice clusters and characterize patterns in mortality rates. A coincident visually integral depiction (using color characteristics to represent both data and reliability) is found to inhibit perception of clusters that contain some enumeration units with unreliable data, and to make it difficult for users to consider data and reliability independently.}, bibtype = {article}, author = {MacEachren, Alan M. and Brewer, C. A. and Pickle, Linda W.}, journal = {Environment and Planning A} }
@inproceedings{kock_parallel_1998, title = {A parallel x-y manipulator with actuation redundancy for high-speed and active-stiffness applications}, volume = {3}, doi = {10.1109/ROBOT.1998.680665}, abstract = {A 2-DOF parallel manipulator with actuation redundancy is examined for high-speed and stiffness-controlled operation. Advantages of actuation redundancy are outlined. The kinematics and singularity-free workspace of the manipulator are presented together with a force transmission analysis. Finally, a novel control scheme that guarantees a lower bound of the end-effector stiffness (LBSC) is presented. Simulation results are compared with a traditional control scheme for high-speed applications using the minimal 2-norm of actuator torques.}, booktitle = {Proceedings. 1998 {IEEE} {International} {Conference} on {Robotics} and {Automation} ({Cat}. {No}.98CH36146)}, author = {Kock, S. and Schumacher, W.}, month = may, year = {1998}, keywords = {2-DOF parallel manipulator, Control engineering, Haptic interfaces, Jacobian matrices, Leg, Parallel robots, Torque control, Uncertainty, active-stiffness, actuation redundancy, actuators, force control, force transmission analysis, kinematics, manipulator kinematics, manipulators, minimal 2-norm, parallel x-y manipulator, redundancy, singularity-free workspace, stiffness-controlled operation, to cite}, pages = {2295--2300 vol.3} }
@article{ title = {Rule-based versus probabilistic approaches to the diagnosis of faults in wastewater treatment processes}, type = {article}, year = {1996}, keywords = {Bayesian belief networks,causal belief networks,diagnosis,expert systems,rule-based systems,uncertainty,wastewater treatment}, pages = {265--273}, volume = {10}, websites = {http://www.sciencedirect.com/science/article/pii/0954181096000039}, month = {8}, id = {05bfed08-1d1b-3209-8777-170c9795ee3d}, created = {2015-04-11T18:46:33.000Z}, accessed = {2015-04-11}, file_attached = {false}, profile_id = {95e10851-cdf3-31de-9f82-1ab629e601b0}, group_id = {b4696632-111d-356a-8c67-a11d7f5f1174}, last_modified = {2017-03-14T14:28:31.243Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, private_publication = {false}, abstract = {The need for computer-based diagnostic tools in wastewater management is outlined. Rule-based and probabilistic approaches to the development of diagnostic expert systems are critically reviewed, and it is demonstrated that the rule-based approach has serious limitations which make it unsuitable for diagnostic tasks under conditions of uncertainty. It is shown that Bayesian belief networks (BBNs), a probabilistic approach, have none of these limitations and are well-suited to diagnosis under uncertainty. The theory and application of BBNs are outlined and illustrated by a simple example based on a wastewater treatment plant. A brief case study is presented of the development of a full-scale BBN for the diagnosis of faults in a wastewater treatment plant. It is concluded that BBNs are far superior to rule-based systems in their ability to diagnose faults in complex systems like wastewater treatment processes, whose behaviour is inherently uncertain.}, bibtype = {article}, author = {Chong, H.G. and Walley, W.J.}, doi = {10.1016/0954-1810(96)00003-9}, journal = {Artificial Intelligence in Engineering}, number = {3} }
@misc{ title = {Mapping health statistics: Representing data reliability}, type = {misc}, year = {1995}, source = {Proceedings of the 17th International Cartographic Conference}, keywords = {choropleth,health maps,merger,overlay,reliability,uncertainty,{\textdagger}visualization}, websites = {http://www.geovista.psu.edu/publications/ica1995/MacEachren_Mapping%20health%20statistics.pdf}, publisher = {International Cartographic Association}, city = {Barcelona, Spain, September 3-9}, id = {9a3fad46-c343-366f-8650-e86049024967}, created = {2018-05-29T14:05:56.732Z}, file_attached = {false}, profile_id = {6d8d7993-9618-3f6c-983a-9f6761313797}, group_id = {4f1d95d1-59ee-3ce8-85ce-055cfae2da74}, last_modified = {2018-05-29T14:05:56.732Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {13129}, source_type = {booklet}, notes = {NCHS}, private_publication = {false}, bibtype = {misc}, author = {MacEachren, Alan M. and Brewer, C. A. and Pickle, Linda W.} }
@article{ title = {Aspects of reasoning with uncertainty in an agricultural GIS environment}, type = {article}, year = {1995}, keywords = {GIS,decision making,reasoning,uncertainty}, volume = {1}, id = {a34ad69f-7379-371b-88cd-1df0fa8ff551}, created = {2018-05-29T14:06:14.883Z}, file_attached = {false}, profile_id = {6d8d7993-9618-3f6c-983a-9f6761313797}, group_id = {4f1d95d1-59ee-3ce8-85ce-055cfae2da74}, last_modified = {2018-05-29T14:06:14.883Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {13347}, source_type = {article}, private_publication = {false}, bibtype = {article}, author = {Caccetta, P and Campbell, N and West, G and Kiiveri, H and Gahegan, M}, journal = {Applied Expert Systems} }
@book{ title = {{\emph{Some}} Truth with Maps: A Primer on Symbolization and Design}, type = {book}, year = {1994}, keywords = {data classification,data models,figure-ground,map use,symbol-referent,symbolization,text,uncertainty,visualization,{\textdagger}map design}, publisher = {Association of American Geographers}, city = {Washington, D. C.}, institution = {Association of American Geographers}, series = {AAG Resource Publications in Geography}, id = {5f23c224-2374-3158-ac03-200306ce0175}, created = {2018-05-29T14:06:37.225Z}, file_attached = {false}, profile_id = {6d8d7993-9618-3f6c-983a-9f6761313797}, group_id = {4f1d95d1-59ee-3ce8-85ce-055cfae2da74}, last_modified = {2018-05-29T14:06:37.225Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {13114}, source_type = {book}, private_publication = {false}, bibtype = {book}, author = {MacEachren, Alan M.} }
@article{lehmanUncertaintyComputerApplication1989, title = {Uncertainty in Computer Application and Its Control through the Engineering of Software}, author = {Lehman, M. M.}, year = {1989}, volume = {1}, pages = {3--27}, issn = {2047-7481}, doi = {10.1002/smr.4360010103}, abstract = {Computers are being applied more and more broadly to address applications in all areas of human activity, penetrating ever deeper into the very fabric of society. As a consequence, mankind is becoming, collectively and individually, ever more dependent on software and on the integrity of that software. In this context the term software includes both the systems software that constitutes a fundamental part of the operational configuration and the programs that implement each individual application. Integrity is a many faceted concept that has to do with the availability of programs whenever they are needed and their correctness in relation to the circumstances at the moment of execution or, more precisely, when the results of computation are applied. A program must produce a solution that is correct and relevant when used. It must continue to do so whenever required over the lifetime of an application and of the systems that realize and support it. All this is required despite continuing change in a dynamic world. This paper discusses some properties of software and of software technology and identifies fundamental issues that must be addressed if program integrity is to be achieved initially, and if a program is to be maintained as satisfactory in a continuously changing operational environment. The discussion leads to formulation of a 'Principle of Uncertainty' that applies, in general, to all computer applications in the real world. The principle follows from the fact that any program is a model, albeit many times removed by abstraction and reification from the real world it reflects and addresses. The consequences of this basic fact lead to a need for a disciplined technology associated with a controlled process for definition of each application, its operational domain, the envisaged system and software, and for its development, application and evolution (maintenance). This paper concludes with a brief mention of the implications of the analysis on relevant technological issues. Fundamental concepts and observations that underlie the development of a software engineering discipline and its supporting technology are introduced. Software engineering is seen as the discipline that permits one to limit uncertainty and its consequences through the introduction and control of appropriate development processes and the systematic and disciplined applications of methods and tools. The emergence of such tools, of computer assisted software engineering (CASE) and of programming and project support environments (IPSEs) is briefly discussed. Finally the paper addresses issues that arise in transferring this still developing technology to industry and introducing it into practice, outlining briefly how this may be approached.}, journal = {Journal of Software Maintenance: Research and Practice}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-11691733,communicating-uncertainty,complexity,software-engineering,software-errors,software-uncertainty,uncertainty}, lccn = {INRMM-MiD:c-11691733}, number = {1} }
@inproceedings{xiao_replanning_1989, title = {On replanning for assembly tasks using robots in the presence of uncertainties}, doi = {10.1109/ROBOT.1989.100056}, abstract = {High-precision assembly tasks cannot be successfully done by robots without taking into account the uncertainties that can cause failure of robot motion. The authors address this problem by planning robot motions at two levels: nominal planning, which assumes no uncertainty, and dynamic replanning, to deal with uncertainties that would cause nominal plans to fail. They introduce a replanning approach based on knowledge of contacts among assembly parts. It consists of patch planning, to resolve the case when a commanded robot motion prematurely stops at a contact other than those planned, and motion strategy planning, to regulate robot motions in order to guarantee the eventual success of a task. A task-independent strategy for patch-plan generation based on concepts of contact planes and abstract obstacles is developed. It is also shown how to apply motion strategies so that, under proper design and motion constraints, the replanning can be guaranteed to succeed.}, booktitle = {1989 {International} {Conference} on {Robotics} and {Automation} {Proceedings}}, author = {Xiao, J. and Volz, R. A.}, month = may, year = {1989}, keywords = {Assembly systems, Control systems, Motion control, Motion planning, Robot motion, Robot sensing systems, Robotic assembly, Robotics and automation, Strategic planning, Uncertainty, assembling, dynamic replanning, high-precision assembly, industrial robots, manufacturing computer control, motion strategy planning, nominal planning, patch planning, position control, robots, uncertainties}, pages = {638--645 vol.2} }
@article{ title = {Map use and map making education: Attention to sources of geographic information}, type = {article}, year = {1986}, keywords = {education: maps,generalization,map users,terrain maps,uncertainty,visualization quality,{\textdagger}cartometry}, volume = {23}, websites = {http://www.geovista.psu.edu/publications/MacEachren/MacEachren_MapUse&MapMakingEducation_86.pdf}, id = {87ce20ad-342f-38d5-8c94-fcdfe38032b9}, created = {2018-05-29T14:06:25.627Z}, file_attached = {false}, profile_id = {6d8d7993-9618-3f6c-983a-9f6761313797}, group_id = {4f1d95d1-59ee-3ce8-85ce-055cfae2da74}, last_modified = {2018-05-29T14:06:25.627Z}, read = {false}, starred = {false}, authored = {false}, confirmed = {true}, hidden = {false}, citation_key = {13102}, source_type = {article}, notes = {_J}, private_publication = {false}, bibtype = {article}, author = {MacEachren, Alan M.}, journal = {Cartographic Journal} }
@article{purkus_handling_2015, title = {Handling uncertainty in bioenergy policy design – {A} case study analysis of {UK} and {German} bioelectricity policy instruments}, issn = {0961-9534}, url = {http://www.sciencedirect.com/science/article/pii/S0961953415001154}, doi = {10.1016/j.biombioe.2015.03.029}, abstract = {In designing policies to promote bioenergy, policy makers face challenges concerning uncertainties about the sustainability of bioenergy pathways (including greenhouse gas balances), technology and resource costs, or future energy market framework conditions. New information becomes available with time, but policy adjustments can involve high levels of adaptation costs. To enable an effective steering of technology choices and innovation, policies have to strike a balance between creating a consistent institutional framework, which establishes planning security for investors, and sufficient flexibility to adapt to new information. This paper examines implications of economic theory for handling cost and benefit uncertainty in bioelectricity policy design, focussing on choices between price and quantity instruments, technology differentiation, and policy adjustment. Findings are applied to two case studies, the UK's Renewables Obligation and the German feed-in tariff/feed-in premium scheme. Case study results show the trade-offs that are involved in instrument choice and design – depending on political priorities and a country's specific context, different options can prove more adequate. Combining market-based remuneration with sustainability criteria results in strong incentives for bioenergy producers to search for low-cost solutions; whereas cost-based price instruments with centrally steered technology and feedstock choices offer higher planning security for investors and more direct control for policy makers over what pathways are implemented. Independent of the choice of instrument type and technology differentiation mechanism, findings emphasise the importance of a careful policy design, which determines the exact balance between performance criteria such as cost control, incentive intensity, planning security and adaptive efficiency.}, urldate = {2015-04-18}, journal = {Biomass and Bioenergy}, author = {Purkus, Alexandra and Röder, Mirjam and Gawel, Erik and Thrän, Daniela and Thornley, Patricia}, year = {2015}, keywords = {Bioenergy policy, Electricity sector, Instruments, New institutional economics, Renewable energy policy, uncertainty}, file = {ScienceDirect Full Text PDF:files/51189/Purkus et al. - Handling uncertainty in bioenergy policy design – .pdf:application/pdf;ScienceDirect Snapshot:files/51190/S0961953415001154.html:text/html} }
@article{bistline_electric_2015, title = {Electric {Sector} {Capacity} {Planning} under {Uncertainty}: {Climate} {Policy} and {Natural} {Gas} in the {US}}, issn = {0140-9883}, shorttitle = {Electric {Sector} {Capacity} {Planning} under {Uncertainty}}, url = {http://www.sciencedirect.com/science/article/pii/S0140988315002157}, doi = {10.1016/j.eneco.2015.07.008}, abstract = {This research investigates the dynamics of capacity planning and dispatch in the US electric power sector under a range of technological, economic, and policy-related uncertainties. Using a two-stage stochastic programming approach, model results suggest that the two most critical risks in the near-term planning process of the uncertainties considered here are natural gas prices and the stringency of climate policy. Stochastic strategies indicate that some near-term hedging from lower-cost wind and nuclear may occur but robustly demonstrate that delaying investment and waiting for more information can be optimal to avoid stranding capital-intensive assets. Hedging strategies protect against downside losses while retaining the option value of deferring irreversible commitments until more information is available about potentially lucrative market opportunities. These results are explained in terms of the optionality of investments in the electric power sector, leading to more general insights about uncertainty, learning, and irreversibility. The stochastic solution is especially valuable if decision-makers do not sufficiently account for the potential of climate constraints in future decades or if fuel price projections are outdated.}, urldate = {2015-07-31}, journal = {Energy Economics}, author = {Bistline, John E.}, year = {2015}, keywords = {climate policy, Electricity, risk management, stochastic programming, uncertainty} }
@article{soderbergRisingPolicyConflicts2013, title = {Rising Policy Conflicts in {{Europe}} over Bioenergy and Forestry}, author = {Söderberg, Charlotta and Eckerberg, Katarina}, date = {2013-08}, journaltitle = {Forest Policy and Economics}, volume = {33}, pages = {112--119}, issn = {1389-9341}, doi = {10.1016/j.forpol.2012.09.015}, url = {https://doi.org/10.1016/j.forpol.2012.09.015}, abstract = {[Highlights] [::] EU Bioenergy policy cuts across forest, agriculture, energy and transport sectors. [::] Increased pressure on forest biomass risks putting EU in a wood-deficit situation. [::] Bioenergy conflicts regard land use, biodiversity, climate and sustainability. [::] Conflicts on environmental consequences from bioenergy policy are reconcilable. [::] Conflicts on globally shared rights and responsibilities are not easily reconciled. [Abstract] Growing concerns over emissions of green-house gases causing climate change as well as energy security concerns have spurred the interest in bioenergy production pushed by EU targets to fulfil the goal of 20~per cent renewable energy in 2020, as well as the goal of 10~per cent renewable fuels in transport by 2020. Increased bioenergy production is also seen to have political and economic benefits for rural areas and farming regions in Europe and in the developing world. There are, however, conflicting views on the potential benefits of large scale bioenergy production, and recent debates have also drawn attention to a range of environmental and socio-economic issues that may arise in this respect. One of these challenges will be that of accommodating forest uses - including wood for energy, and resulting intensification of forest management - with biodiversity protection in order to meet EU policy goals. We note that the use of biomass and biofuels spans over several economic sector policy areas, which calls for assessing and integrating environmental concerns across forest, agriculture, energy and transport sectors. In this paper, we employ frame analysis to identify the arguments for promoting bioenergy and assess the potential policy conflicts in the relevant sectors, through the analytical lens of environmental policy integration. We conclude that while there is considerable leverage of environmental arguments in favour of bioenergy in the studied economic sectors, and potential synergies with other policy goals, environmental interest groups remain sceptical to just how bioenergy is currently being promoted. There is a highly polarised debate particularly relating to biofuel production. Based on our analysis, we discuss the potential for how those issues could be reconciled drawing on the frame conflict theory, distinguishing between policy disagreements and policy controversies.}, keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-11738844,~to-add-doi-URL,bioenergy,biomass,europe,forest-resources,ghg,science-policy-interface,uncertainty} }